Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2020/03/18 12:10:14 UTC

Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #908

See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/908/display/redirect>

Changes:


------------------------------------------
[...truncated 63.04 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, idna, certifi, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0318121009-5698dd53_b18203c2-dbd1-4fa1-9aef-2bcfe11ee305 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
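The IllegalArgumentException above appears to be raised on the Java side while the job server assembles the output jar; the Python client only reports the resulting FAILED state. For reference, the shell invocation being tested maps onto ordinary pipeline options, so the same jar-creation call can be sketched directly in Python. The two .jar paths below are placeholder assumptions, not the paths used by this job:

# Hedged sketch of the jar-creation call made by the test script above.
# Both jar paths are illustrative placeholders.
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    '--runner=SparkRunner',
    '--spark_job_server_jar=/path/to/beam-runners-spark-job-server.jar',
    '--output_executable_path=/tmp/spark-test.jar',
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
])

p = beam.Pipeline(options=options)
_ = p | beam.Create([0, 1, 2]) | beam.Map(lambda x: x + 1)
p.run().wait_until_finish()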

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 18s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ftzmi3kwjhzqg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1158

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1158/display/redirect?page=changes>

Changes:

[ankurgoenka] [BEAM-9735] Adding Always trigger and using it in Reshuffle

[boyuanz] [BEAM-9562] Update Element.timer, Element.Timer to Element.timers and


------------------------------------------
[...truncated 64.30 KB...]
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.22.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, certifi, chardet, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:grpc._channel:Exception iterating requests!
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/lib/python3.7/site-packages/grpc/_channel.py",> line 195, in consume_request_iterator
    request = next(request_iterator)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/artifact_service.py",> line 316, in __next__
    raise self._queue.get()
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 192, in stage
    return self._stage_via_portable_service(channel, staging_session_token)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 210, in _stage_via_portable_service
    staging_session_token)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/artifact_service.py",> line 506, in offer_artifacts
    for request in requests:
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/lib/python3.7/site-packages/grpc/_channel.py",> line 416, in __next__
    return self._next()
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/lib/python3.7/site-packages/grpc/_channel.py",> line 706, in _next
    raise self
grpc._channel._MultiThreadedRendezvous: <_MultiThreadedRendezvous of RPC that terminated with:
	status = StatusCode.UNIMPLEMENTED
	details = "Method not found: org.apache.beam.model.job_management.v1.ArtifactStagingService/ReverseArtifactRetrievalService"
	debug_error_string = "{"created":"@1586556158.516456243","description":"Error received from peer ipv4:127.0.0.1:44631","file":"src/core/lib/surface/call.cc","file_line":1056,"grpc_message":"Method not found: org.apache.beam.model.job_management.v1.ArtifactStagingService/ReverseArtifactRetrievalService","grpc_status":12}"
>
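StatusCode.UNIMPLEMENTED (grpc_status 12) with "Method not found" means the job server this run connected to does not implement the ReverseArtifactRetrievalService method the SDK is now calling, consistent with the new-style artifact staging introduced by "Attempt to stage resources via new API in portable runner" (listed under build #1157 below). A minimal sketch of how a client could detect that condition and fall back, assuming hypothetical stage_via_new_api and stage_via_legacy_api callables (this is not Beam's actual staging code):

# Hedged sketch: detect an UNIMPLEMENTED staging method and fall back.
import grpc

def stage_with_fallback(stage_via_new_api, stage_via_legacy_api):
    try:
        return stage_via_new_api()
    except grpc.RpcError as err:
        # grpc_status 12 in the log above corresponds to StatusCode.UNIMPLEMENTED.
        if err.code() == grpc.StatusCode.UNIMPLEMENTED:
            return stage_via_legacy_api()
        raise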
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 571, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0410220238-ec417907_93d3a043-6e78-4b9c-bc5a-d282b5af73af failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
>>> FAILURE
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 49s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/4qh4utern3jmm

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1157

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1157/display/redirect?page=changes>

Changes:

[robertwb] Attempt to stage resources via new API in portable runner.

[pabloem] Fix from_container_image call


------------------------------------------
[...truncated 64.50 KB...]
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.22.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, idna, chardet, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:grpc._channel:Exception iterating requests!
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/lib/python3.7/site-packages/grpc/_channel.py",> line 195, in consume_request_iterator
    request = next(request_iterator)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/artifact_service.py",> line 316, in __next__
    raise self._queue.get()
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 192, in stage
    return self._stage_via_portable_service(channel, staging_session_token)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 210, in _stage_via_portable_service
    staging_session_token)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/artifact_service.py",> line 506, in offer_artifacts
    for request in requests:
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/lib/python3.7/site-packages/grpc/_channel.py",> line 416, in __next__
    return self._next()
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/lib/python3.7/site-packages/grpc/_channel.py",> line 706, in _next
    raise self
grpc._channel._MultiThreadedRendezvous: <_MultiThreadedRendezvous of RPC that terminated with:
	status = StatusCode.UNIMPLEMENTED
	details = "Method not found: org.apache.beam.model.job_management.v1.ArtifactStagingService/ReverseArtifactRetrievalService"
	debug_error_string = "{"created":"@1586549836.278756654","description":"Error received from peer ipv4:127.0.0.1:45827","file":"src/core/lib/surface/call.cc","file_line":1056,"grpc_message":"Method not found: org.apache.beam.model.job_management.v1.ArtifactStagingService/ReverseArtifactRetrievalService","grpc_status":12}"
>
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 571, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0410201716-ed9f842_87f1a94b-20c1-4780-a88e-36d0a5886ade failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 11m 9s
65 actionable tasks: 51 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ryr2mgyyfeeam

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1156

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1156/display/redirect?page=changes>

Changes:

[veblush] Upgrades gcsio to 2.1.2


------------------------------------------
[...truncated 63.93 KB...]
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.22.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, chardet, urllib3, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
Traceback (most recent call last):
  File "<string>", line 23, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 501, in run
    self._options).run(False)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 514, in run
    return self.runner.run_pipeline(self, self._options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/spark_runner.py",> line 47, in run_pipeline
    return super(SparkRunner, self).run_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 403, in run_pipeline
    proto_pipeline = self.get_proto_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 320, in get_proto_pipeline
    portable_options))
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 282, in _create_environment
    return env_class.from_options(portable_options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/transforms/environments.py",> line 255, in from_options
    return from_container_image(
NameError: name 'from_container_image' is not defined
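The NameError means from_container_image is being looked up as a free (module-level) name inside environments.py's from_options rather than resolved through the class that defines it; the follow-up commit "[pabloem] Fix from_container_image call" listed under build #1157 above points at the same call. A hypothetical reduction of the pattern (class and attribute names here are illustrative, not Beam's actual definitions):

# Hypothetical reduction of the NameError above; names are illustrative.
class DockerEnvironment(object):

    def __init__(self, container_image):
        self.container_image = container_image

    @classmethod
    def from_container_image(cls, container_image):
        return cls(container_image)

    @classmethod
    def from_options(cls, options):
        # Buggy form: 'from_container_image' is not defined at module level,
        # so this lookup raises NameError at call time:
        #   return from_container_image(options.environment_config)
        # Fixed form: resolve the classmethod through cls.
        return cls.from_container_image(options.environment_config)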

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 59s
65 actionable tasks: 51 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/odupbeiagszfy

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1155

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1155/display/redirect?page=changes>

Changes:

[github] Add --region to changelog


------------------------------------------
[...truncated 63.24 KB...]
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.22.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, certifi, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
Traceback (most recent call last):
  File "<string>", line 23, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 501, in run
    self._options).run(False)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 514, in run
    return self.runner.run_pipeline(self, self._options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/spark_runner.py",> line 47, in run_pipeline
    return super(SparkRunner, self).run_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 403, in run_pipeline
    proto_pipeline = self.get_proto_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 320, in get_proto_pipeline
    portable_options))
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 282, in _create_environment
    return env_class.from_options(portable_options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/transforms/environments.py",> line 255, in from_options
    return from_container_image(
NameError: name 'from_container_image' is not defined
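
The NameError indicates that from_options in environments.py refers to from_container_image as a bare name; if from_container_image is defined as a classmethod rather than a module-level function in this SDK revision, the bare reference cannot resolve. A minimal, hypothetical reproduction of that class of bug (illustrative names only, not Beam's actual code):

# Hypothetical reproduction of the NameError pattern in the traceback.
# 'Environment' is an illustrative class, not Beam's actual code.
class Environment(object):

    @classmethod
    def from_container_image(cls, image):
        return cls()

    @classmethod
    def from_options_buggy(cls, options):
        # A bare name inside a classmethod is looked up in module globals,
        # not in the class namespace, so this raises
        # NameError: name 'from_container_image' is not defined.
        return from_container_image(options['image'])

    @classmethod
    def from_options_fixed(cls, options):
        # Qualifying the call with cls (or the class name) resolves it.
        return cls.from_container_image(options['image'])


if __name__ == '__main__':
    opts = {'image': 'apache/beam_python3.7_sdk:2.21.0.dev'}
    print(Environment.from_options_fixed(opts))
    try:
        Environment.from_options_buggy(opts)
    except NameError as e:
        print('reproduced:', e)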

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
>>> FAILURE
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
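
The failing step can be reproduced outside the Jenkins harness by handing the shell flags directly to PipelineOptions; a sketch, with the job-server jar path, output path, and container image as placeholders for $JOB_SERVER_JAR, $OUTPUT_JAR, and $PYTHON_CONTAINER_IMAGE:

# Sketch: programmatic equivalent of the "python -c $PIPELINE_PY ..." call
# above. The jar path, output path, and container image are placeholders
# for $JOB_SERVER_JAR, $OUTPUT_JAR, and $PYTHON_CONTAINER_IMAGE.
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    '--runner=SparkRunner',
    '--spark_job_server_jar=/path/to/beam-runners-spark-job-server.jar',
    '--output_executable_path=/tmp/spark-test.jar',
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
])

with beam.Pipeline(options=options) as p:
    _ = p | beam.Create([0, 1, 2]) | beam.Map(lambda x: x + 1)

With the SDK at this revision, building the pipeline proto hits the same NameError before any jar is written, which is why the java -jar step above is skipped and TEST_EXIT_CODE stays non-zero.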

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 56s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/dl6dm6x43g3ow

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1154

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1154/display/redirect?page=changes>

Changes:

[samuelw] [BEAM-9651] Prevent StreamPool and stream initialization livelock


------------------------------------------
[...truncated 63.50 KB...]
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.22.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, urllib3, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
Traceback (most recent call last):
  File "<string>", line 23, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 501, in run
    self._options).run(False)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 514, in run
    return self.runner.run_pipeline(self, self._options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/spark_runner.py",> line 47, in run_pipeline
    return super(SparkRunner, self).run_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 403, in run_pipeline
    proto_pipeline = self.get_proto_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 320, in get_proto_pipeline
    portable_options))
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 282, in _create_environment
    return env_class.from_options(portable_options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/transforms/environments.py",> line 255, in from_options
    return from_container_image(
NameError: name 'from_container_image' is not defined

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 27s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/jtzyoujhs46da

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1153

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1153/display/redirect?page=changes>

Changes:

[michal.walenia] [BEAM-9734] Revert #11122


------------------------------------------
[...truncated 523.96 KB...]
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.22.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, idna, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
Traceback (most recent call last):
  File "<string>", line 23, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 501, in run
    self._options).run(False)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 514, in run
    return self.runner.run_pipeline(self, self._options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/spark_runner.py",> line 47, in run_pipeline
    return super(SparkRunner, self).run_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 403, in run_pipeline
    proto_pipeline = self.get_proto_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 320, in get_proto_pipeline
    portable_options))
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 282, in _create_environment
    return env_class.from_options(portable_options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/transforms/environments.py",> line 255, in from_options
    return from_container_image(
NameError: name 'from_container_image' is not defined

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
>>> FAILURE
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 13s
65 actionable tasks: 60 executed, 4 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/mb4u5osbouv4m

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1152

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1152/display/redirect>

Changes:


------------------------------------------
[...truncated 63.44 KB...]
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.22.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, certifi, urllib3, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
Traceback (most recent call last):
  File "<string>", line 23, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 501, in run
    self._options).run(False)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 514, in run
    return self.runner.run_pipeline(self, self._options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/spark_runner.py",> line 47, in run_pipeline
    return super(SparkRunner, self).run_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 403, in run_pipeline
    proto_pipeline = self.get_proto_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 320, in get_proto_pipeline
    portable_options))
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 282, in _create_environment
    return env_class.from_options(portable_options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/transforms/environments.py",> line 255, in from_options
    return from_container_image(
NameError: name 'from_container_image' is not defined

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 17s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/azcdjlep7nwpg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1151

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1151/display/redirect>

Changes:


------------------------------------------
[...truncated 63.67 KB...]
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.22.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, urllib3, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
Traceback (most recent call last):
  File "<string>", line 23, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 501, in run
    self._options).run(False)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 514, in run
    return self.runner.run_pipeline(self, self._options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/spark_runner.py",> line 47, in run_pipeline
    return super(SparkRunner, self).run_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 403, in run_pipeline
    proto_pipeline = self.get_proto_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 320, in get_proto_pipeline
    portable_options))
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 282, in _create_environment
    return env_class.from_options(portable_options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/transforms/environments.py",> line 255, in from_options
    return from_container_image(
NameError: name 'from_container_image' is not defined

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 32s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/xn7gvbvi6kf6w

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1150

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1150/display/redirect?page=changes>

Changes:

[kcweaver] Moving to 2.22.0-SNAPSHOT on master branch.


------------------------------------------
[...truncated 523.78 KB...]
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.22.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, chardet, idna, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi
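
For reference, the submission below boils down to running the quoted pipeline with a handful of portable-runner flags. The following is only a rough standalone sketch of that same submission, with the option names copied from the command further down and placeholder paths standing in for $JOB_SERVER_JAR and $OUTPUT_JAR; it is an illustration, not part of the test script.

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to

global_var = 1  # must survive the trip through save_main_session

# Placeholder paths: in the real test these come from $JOB_SERVER_JAR and $OUTPUT_JAR.
options = PipelineOptions([
    '--runner=SparkRunner',
    '--spark_job_server_jar=/path/to/beam-runners-spark-job-server.jar',
    '--output_executable_path=/tmp/beam-spark-test.jar',
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
])
options.view_as(SetupOptions).save_main_session = True

pipeline = beam.Pipeline(options=options)
pcoll = (pipeline
         | beam.Create([0, 1, 2])
         | beam.Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))
pipeline.run().wait_until_finish()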

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
Traceback (most recent call last):
  File "<string>", line 23, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 501, in run
    self._options).run(False)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 514, in run
    return self.runner.run_pipeline(self, self._options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/spark_runner.py",> line 47, in run_pipeline
    return super(SparkRunner, self).run_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 403, in run_pipeline
    proto_pipeline = self.get_proto_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 320, in get_proto_pipeline
    portable_options))
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 282, in _create_environment
    return env_class.from_options(portable_options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/transforms/environments.py",> line 255, in from_options
    return from_container_image(
NameError: name 'from_container_image' is not defined
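
The traceback shows DockerEnvironment.from_options in environments.py calling a module-level helper named from_container_image that is not defined in that module's namespace at the point of the call. As a hypothetical minimal reproduction of that failure shape (not Beam's actual environments.py), note that such a reference only blows up when the classmethod is finally invoked, which is why the NameError surfaces at submission time rather than at import time:

# Hypothetical illustration only; not Beam's actual code.
class FakeDockerEnvironment:
    @classmethod
    def from_options(cls, options):
        # 'from_container_image' is never defined or imported here, so the
        # name lookup fails at call time, not when the class is defined.
        return from_container_image(options['environment_config'])

try:
    FakeDockerEnvironment.from_options(
        {'environment_config': 'apache/beam_python3.7_sdk:2.21.0.dev'})
except NameError as err:
    print(err)  # name 'from_container_image' is not defined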

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 15s
65 actionable tasks: 63 executed, 1 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/nymah3vm2hacc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1149

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1149/display/redirect?page=changes>

Changes:

[github] add missing bracket


------------------------------------------
[...truncated 63.92 KB...]
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, chardet, urllib3, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
Traceback (most recent call last):
  File "<string>", line 23, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 501, in run
    self._options).run(False)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 514, in run
    return self.runner.run_pipeline(self, self._options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/spark_runner.py",> line 47, in run_pipeline
    return super(SparkRunner, self).run_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 403, in run_pipeline
    proto_pipeline = self.get_proto_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 320, in get_proto_pipeline
    portable_options))
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 282, in _create_environment
    return env_class.from_options(portable_options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/transforms/environments.py",> line 255, in from_options
    return from_container_image(
NameError: name 'from_container_image' is not defined

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 9s
65 actionable tasks: 51 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/6ley5ayydpwds

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1148

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1148/display/redirect?page=changes>

Changes:

[boyuanz] [BEAM-9562, BEAM-6274] Fix-up timers to use Elements.Timer proto in data

[robertwb] Allow unset write threshold for state backed iterable coder.

[github] Update environments.py to add a method to specify container image


------------------------------------------
[...truncated 63.79 KB...]
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, urllib3, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
Traceback (most recent call last):
  File "<string>", line 23, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 501, in run
    self._options).run(False)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",> line 514, in run
    return self.runner.run_pipeline(self, self._options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/spark_runner.py",> line 47, in run_pipeline
    return super(SparkRunner, self).run_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 403, in run_pipeline
    proto_pipeline = self.get_proto_pipeline(pipeline, options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 320, in get_proto_pipeline
    portable_options))
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 282, in _create_environment
    return env_class.from_options(portable_options)
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/transforms/environments.py",> line 255, in from_options
    return from_container_image(
NameError: name 'from_container_image' is not defined

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 34s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/c4tczojssjoda

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1147

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1147/display/redirect?page=changes>

Changes:

[github] [BEAM-9727] Automatically set required experiment flags for dataflow


------------------------------------------
[...truncated 63.15 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, urllib3, idna, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0410002210-6526771d_60f56e43-8ddc-439b-b29f-cf31a00475c0 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
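
This build fails differently from the NameError above: while assembling $OUTPUT_JAR, the Java job server requires exactly one jar on its application classloader and reports IllegalArgumentException when it finds something else. A loose, hypothetical Python analogue of that kind of precondition (illustration only; the real check lives in the Java job server, and the paths below are placeholders):

# Hypothetical analogue of an "exactly one jar" precondition.
def require_single_jar(classpath_entries):
    jars = [entry for entry in classpath_entries if entry.endswith('.jar')]
    if len(jars) != 1:
        raise ValueError('Expected exactly one jar, found %d: %s'
                         % (len(jars), jars))
    return jars[0]

# Passes with a single shaded jar on the (placeholder) classpath...
print(require_single_jar(['/opt/beam/beam-runners-spark-job-server.jar']))
# ...and fails if the classloader sees several entries or none at all.
try:
    require_single_jar(['/ws/build/classes/java/main', '/ws/build/resources/main'])
except ValueError as err:
    print(err)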

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 26s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/vljxllkg53bji

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1146

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1146/display/redirect?page=changes>

Changes:

[github] Revert "[BEAM-9651] Prevent StreamPool and stream initialization


------------------------------------------
[...truncated 63.35 KB...]

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, urllib3, idna, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:root:Waiting for grpc channel to be ready at localhost:57079.
WARNING:root:Waiting for grpc channel to be ready at localhost:57079.
WARNING:root:Waiting for grpc channel to be ready at localhost:57079.
WARNING:root:Waiting for grpc channel to be ready at localhost:57079.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409234846-b34ba830_11b01cf5-3d2b-4577-8cba-406587c40088 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR
>>> FAILURE

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 11m 46s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/5txe2r6xcysju

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1145

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1145/display/redirect?page=changes>

Changes:

[samuelw] [BEAM-9651] Prevent StreamPool and stream initialization livelock


------------------------------------------
[...truncated 62.99 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, chardet, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409221307-cfcca267_70e1b834-5386-4d40-9f12-6e4bfe2442bf failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 5s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/p24q7yypfmo2c

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1144

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1144/display/redirect?page=changes>

Changes:

[lcwik] [BEAM-4374] Fix missing deletion of metrics.


------------------------------------------
[...truncated 63.17 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, urllib3, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409210957-e0a11628_28c47287-21e7-4fb0-a8b9-3cb785ba66ff failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 15s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/djohmwer7v2ya

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1143

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1143/display/redirect?page=changes>

Changes:

[kcweaver] [BEAM-9726] [py] Make region optional for non-service Dataflow.

[kcweaver] [BEAM-9726] [java] Make region optional for non-service runner.


------------------------------------------
[...truncated 63.40 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, chardet, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409204456-1611f809_cc2798e8-629a-4758-8a12-e79f79507653 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 57s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/js3xr2mrah7xo

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1142

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1142/display/redirect>

Changes:


------------------------------------------
[...truncated 62.97 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, certifi, chardet, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409183346-7ee94242_70dd2430-7b8e-4a83-88c2-24cd95ab4acf failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 57s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/cylibs3vnm3fi

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1141

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1141/display/redirect?page=changes>

Changes:

[github] [BEAM-9085] Fix performance regression in SyntheticSource on Python 3


------------------------------------------
[...truncated 62.93 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
WARNING: Retrying (Retry(total=9, connect=None, read=None, redirect=None, status=None)) after connection broken by 'ProtocolError('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))': /simple/six/
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, chardet, certifi, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409180628-8b832b21_6a5f508c-e964-4e8f-b265-a5b6690e8d03 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 16s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/pdepxlwxkmz6m

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1140

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1140/display/redirect?page=changes>

Changes:

[github] [BEAM-9731] Include more detail in passert.Equals errors. (#11359)


------------------------------------------
[...truncated 62.95 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, certifi, chardet, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409175231-6c7470de_6556d9b2-49a4-4a58-bd18-2beb073df150 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
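
The RuntimeError above comes from the portable runner's wait_until_finish(), which raises once the job reaches a FAILED terminal state and includes the job id, the state, and the job server's last error message. A minimal sketch (illustrative only, not part of the test script) of catching it so the failure can be logged before re-raising:

# Hypothetical wrapper around the run shown in PIPELINE_PY above.
try:
    result = pipeline.run()
    result.wait_until_finish()
except RuntimeError as err:
    # The message carries the job id, terminal state and last error,
    # here the IllegalArgumentException about the expected jar.
    print('pipeline failed: %s' % err)
    raise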

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 38s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/t5ctp7jrjdn4w

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1139

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1139/display/redirect?page=changes>

Changes:

[github] [BEAM-8280] Document Python 3 annotations support (#11232)


------------------------------------------
[...truncated 62.95 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, chardet, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409171037-67f2a0da_fcf7e1fd-7345-41c0-935c-6d67a17945ac failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 4s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/cbyjtcvkdmam2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1138

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1138/display/redirect>

Changes:


------------------------------------------
[...truncated 63.16 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, urllib3, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409122021-ed749bd6_ed3bc4c4-cf62-4e00-84c0-708f2a6ae9a6 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 32s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/rg2yn3kvzaupg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1137

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1137/display/redirect?page=changes>

Changes:

[michael.jacoby] [BEAM-9647] fixes MQTT clientId to long


------------------------------------------
[...truncated 63.39 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, idna, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409115147-688ea53a_81530e11-74aa-4508-a6ed-bb0b1f69c86a failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 27s
65 actionable tasks: 51 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/6yquxth5byi7y

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1136

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1136/display/redirect?page=changes>

Changes:

[github] [BEAM-9721]Conditionally add Dataflow region to Dataflow-based


------------------------------------------
[...truncated 521.95 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409095333-6eecb36f_1cbc312b-6d37-4545-9ba3-ccf5a59b8956 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 32s
65 actionable tasks: 56 executed, 8 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/c5ukoiaqvwfia

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1135

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1135/display/redirect?page=changes>

Changes:

[github] [BEAM-9550] Increase JVM Metaspace size for the TaskExecutors. (#11193)


------------------------------------------
[...truncated 63.43 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, urllib3, chardet, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
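
Because --output_executable_path is set, the command above asks the portable runner to package the pipeline together with the job server into a standalone jar at $OUTPUT_JAR rather than execute it; the jar is only run later with java -jar. A hedged sketch of the same flags expressed as an explicit PipelineOptions list (the runner name and paths below are placeholders, not values from this log):

from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    "--runner=SparkRunner",
    # Placeholder paths; the Jenkins script fills these in from
    # $JOB_SERVER_JAR and $OUTPUT_JAR.
    "--spark_job_server_jar=/path/to/beam-runners-spark-job-server.jar",
    "--output_executable_path=/tmp/beam-pipeline.jar",
    "--sdk_worker_parallelism=1",
    "--environment_type=DOCKER",
    "--environment_config=apache/beam_python3.7_sdk:2.21.0.dev",
])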
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409093220-4c510bfe_0536ed33-a818-4dae-8751-c81d30da44e5 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
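
The RuntimeError above is the job-server failure surfaced through wait_until_finish(), which raises the job id, terminal state, and last error message (hence the traceback pointing at line 24 of the inlined pipeline, the wait_until_finish call). A small self-contained sketch, not part of the test, of separating the terminal state from the job-service message when debugging such failures:

import apache_beam as beam

# Trivial pipeline on the default runner; the error-handling pattern is the
# point, not the job itself.
pipeline = beam.Pipeline()
_ = pipeline | beam.Create([1])

result = pipeline.run()
try:
    result.wait_until_finish()
except RuntimeError as err:
    # On a failed portable job the state is FAILED and err carries the
    # job-server message.
    print("Terminal state: %s" % result.state)
    print("Job service error: %s" % err)
    raise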

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 48s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/7mqvfh7ks6pz6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1134

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1134/display/redirect?page=changes>

Changes:

[kamil.wasilewski] [BEAM-9721] Add --region to Dataflow-based load tests

[kamil.wasilewski] [BEAM-9721] LoadTestConfig: handle --region parameter and put default


------------------------------------------
[...truncated 63.20 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, urllib3, idna, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409092142-15654b98_63aa449d-8669-4a60-b06e-8820095b5b1f failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 33s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/zpvmbsujuar7g

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1133

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1133/display/redirect>

Changes:


------------------------------------------
[...truncated 62.90 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409060958-e94f265d_5f205bef-cb5d-4113-bceb-69e87d380e68 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 38s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/kdk7qbc4r7s7a

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1132

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1132/display/redirect?page=changes>

Changes:

[samuelw] Ensure that empty messages are not flushed to handler.


------------------------------------------
[...truncated 62.98 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, certifi, urllib3, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409022845-e6078538_69837c1c-2943-4730-8bea-d98c26fcc4a9 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
>>> FAILURE
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 14s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/34gcfv33v5jyy

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1131

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1131/display/redirect?page=changes>

Changes:

[chamikara] Adds nose back under packages needed for testing.

[robertwb] [BEAM-9618] Mark push registration as deprecated.


------------------------------------------
[...truncated 63.17 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, chardet, idna, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409021919-348c3a98_bc5952ae-f333-41dc-9a37-9b967edc3fbc failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 29s
65 actionable tasks: 51 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/bo4ta3chqiyt2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1130

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1130/display/redirect>

Changes:


------------------------------------------
[...truncated 63.10 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, certifi, urllib3, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409003757-2d85333a_0a366423-489d-4bff-b7ad-a237128ca9ef failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
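The same java.lang.IllegalArgumentException ("Expected exactly one jar on sun.misc.Launcher$AppClassLoader...") recurs in every run below, always before the "java -jar $OUTPUT_JAR" step is reached, so the output jar is never produced. A minimal local sanity check, assuming JOB_SERVER_JAR is the same artifact the script passes to --spark_job_server_jar (the check is an illustration of my own and does not diagnose the classloader problem; it only rules out an obviously malformed artifact):

import os
import zipfile

# Hypothetical check: confirm the job-server artifact is one readable jar
# with a manifest before handing it to --spark_job_server_jar.
job_server_jar = os.environ["JOB_SERVER_JAR"]
assert os.path.isfile(job_server_jar), "not a single jar file: %r" % job_server_jar
with zipfile.ZipFile(job_server_jar) as jar:
    manifest = jar.read("META-INF/MANIFEST.MF").decode("utf-8", errors="replace")
    print("Main-Class present:", "Main-Class" in manifest)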

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 52s
65 actionable tasks: 51 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/adootlxtnrw26

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1129

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1129/display/redirect?page=changes>

Changes:

[github] [Beam-9063]update documentation (#10952)


------------------------------------------
[...truncated 63.61 KB...]

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, urllib3, chardet, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:root:Waiting for grpc channel to be ready at localhost:41821.
WARNING:root:Waiting for grpc channel to be ready at localhost:41821.
WARNING:root:Waiting for grpc channel to be ready at localhost:41821.
WARNING:root:Waiting for grpc channel to be ready at localhost:41821.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0409000753-ca5618f6_21740bcd-4aa9-493d-87e7-e095e8709624 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR
>>> FAILURE

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 12m 35s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/7rbwippvmblou

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1128

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1128/display/redirect?page=changes>

Changes:

[kcweaver] [BEAM-9716] Alias zone to worker_zone and warn user.


------------------------------------------
[...truncated 63.39 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, urllib3, idna, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408234045-96a19248_a22bea80-0415-4613-9be0-2fcb6586e232 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
>>> FAILURE
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 13s
65 actionable tasks: 51 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/j52vumjvjvl4c

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1127

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1127/display/redirect?page=changes>

Changes:

[github] [BEAM-9618] Java SDK worker support for pulling bundle descriptors.


------------------------------------------
[...truncated 63.43 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, urllib3, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408224731-17b79741_75d0ec1c-a480-4c18-9c7a-94de2af48fd3 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 13s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/gdbcd5j7cpe6e

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1126

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1126/display/redirect?page=changes>

Changes:

[robertwb] [BEAM-9322] [BEAM-1833] Better naming for composite transform output


------------------------------------------
[...truncated 63.22 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, certifi, idna, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408220845-a20269ea_e2d24fdb-ad8c-4b40-bb25-942c95f1d4f0 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 6s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/zblv7xbik7sec

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1125

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1125/display/redirect?page=changes>

Changes:

[github] Update session.go (#11352)


------------------------------------------
[...truncated 62.94 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, urllib3, certifi, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408213623-af4e106d_405dd1b4-d3da-4aeb-9d94-9757f4c14cb6 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 57s
65 actionable tasks: 51 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/xib7uziuk7n6o

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1124

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1124/display/redirect?page=changes>

Changes:

[crites] Updates documentation for WINDOWED_VALUE coder.

[mxm] [BEAM-9596] Ensure metrics are available in PipelineResult when the

[crites] Uses iterable coder for windows and copies all of timestamp encoding


------------------------------------------
[...truncated 63.44 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, certifi, urllib3, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408210935-8576bc4d_76ef47c5-c7ce-4dd9-bdfa-264fb9761570 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 3s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/tjhsos7jmdo24

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1123

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1123/display/redirect?page=changes>

Changes:

[github] Update session.go

[github] Update stage.go

[github] Update server_test.go

[github] Update materialize.go

[github] Update materialize_test.go

[github] Update stage_test.go

[github] Update artifact.go

[github] Update provision.go

[github] Update retrieval.go

[github] Update staging.go

[github] Update translate.go

[github] Update datamgr.go

[github] Update datamgr_test.go

[github] Update logging.go

[github] Update logging_test.go

[github] Update monitoring.go

[github] Update session.go

[github] Update statemgr.go

[github] Update statemgr_test.go

[github] Update replace.go

[github] Update replace_test.go

[github] Update provision.go

[github] Update execute.go

[github] Update job.go

[github] Update translate.go

[github] Update translate.go

[github] Update job.go

[github] Update materialize.go

[github] Update translate.go

[github] Update session.go

[github] Update materialize_test.go


------------------------------------------
[...truncated 62.95 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408201609-97b9f7a9_01523a50-9ce9-431c-a37e-5e9a0a46914c failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 7s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/23d3ymughl5qe

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1122

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1122/display/redirect?page=changes>

Changes:

[kcweaver] [BEAM-9714] [Go SDK] Require --region flag in Dataflow runner.


------------------------------------------
[...truncated 63.43 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, urllib3, chardet, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408195242-55fb14aa_503131c1-055a-4cab-975e-25acb0667c41 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 13m 38s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/cwq24hjeoye5e

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1121

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1121/display/redirect>

Changes:


------------------------------------------
[...truncated 63.44 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, certifi, chardet, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408182810-47e03b2_2ad51578-b292-4a4f-b4cb-485ce1a6f5ce failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 39s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/hfmn6hnqgstos

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1120

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1120/display/redirect?page=changes>

Changes:

[mxm] [BEAM-9580] Allow Flink 1.10 processing timers to finish on pipeline

[mxm] Revert "[BEAM-9580] Downgrade Flink version to 1.9 for Nexmark and

[mxm] [BEAM-9557] Fix strings used to verify test output


------------------------------------------
[...truncated 63.45 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, certifi, urllib3, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408180053-86f980fb_f4a909c4-2f15-4443-9829-f64c1151dbb5 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

>>> FAILURE
if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 32s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/rpp7q6febv5nw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1119

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1119/display/redirect?page=changes>

Changes:

[github] [BEAM-9147] Add a VideoIntelligence transform to Java SDK (#11261)


------------------------------------------
[...truncated 63.19 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, urllib3, chardet, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408142145-c06f7c8c_c497d3c9-1e69-48e9-9c45-dc52c0e51709 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 49s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/7pfnu65scptce

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1118

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1118/display/redirect>

Changes:


------------------------------------------
[...truncated 63.43 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, idna, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408120853-bb831ee3_d984f5d8-d267-4c76-95a5-b5791cc4751d failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 34s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/jv5r2q35p7vye

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1117

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1117/display/redirect>

Changes:


------------------------------------------
[...truncated 63.41 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408060947-2cec8e02_6d32b543-2827-425c-b7e0-36aa6aed1341 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 29s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/2kn22ew4ayacw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1116

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1116/display/redirect?page=changes>

Changes:

[ehudm] [BEAM-5422] Document DynamicDestinations.getTable uniqueness requirement


------------------------------------------
[...truncated 63.22 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, chardet, certifi, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408032219-24dd4beb_02390892-ac82-48ac-9528-202e21404aec failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 24s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/fq2hdvf2o5ngc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1115

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1115/display/redirect?page=changes>

Changes:

[github] [BEAM-9529] Remove datastore.v1, googledatastore (#11175)


------------------------------------------
[...truncated 62.92 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, idna, certifi, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408025312-c32b3a4d_6494b728-7b02-4b74-8795-31667a5752ba failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

>>> FAILURE
if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 6s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/al7ygvzvoh7yq

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1114

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1114/display/redirect?page=changes>

Changes:

[robertwb] [BEAM-9577] Plumb resources through Python job service and runner.


------------------------------------------
[...truncated 63.22 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, chardet, idna, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi
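
The flags passed to the jar-creation command below are ordinary Beam pipeline options, so the same step can be expressed directly in Python. The following is a sketch only, using the Spark branch's flags: the two paths are placeholders, and the container tag is the one reported earlier in this log.

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

# Sketch of the jar-creation step with explicit options; the two paths are
# placeholders, everything else mirrors the command-line flags in this log.
options = PipelineOptions([
    '--runner=SparkRunner',
    '--spark_job_server_jar=/path/to/beam-runners-spark-job-server.jar',  # placeholder
    '--output_executable_path=/tmp/spark-test.jar',                       # placeholder
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
    '--sdk_worker_parallelism=1',
])

pipeline = beam.Pipeline(options=options)
_ = pipeline | beam.Create([0, 1, 2]) | beam.Map(lambda x: x + 1)
pipeline.run().wait_until_finish()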

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408004458-1d6f1f25_c0a07cfc-7d2c-491f-849e-e289edcf8d1e failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 47s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/y6gq6krwmn7tm

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1113

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1113/display/redirect>

Changes:


------------------------------------------
[...truncated 62.95 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev
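
The grep above is the only gate on the SDK container image. An equivalent programmatic check (a sketch; the image tag is the one reported in this log) would be:

import subprocess

# Sketch: verify the locally built SDK container image is present, mirroring the
# docker-images grep above.
IMAGE = 'apache/beam_python3.7_sdk:2.21.0.dev'

tags = subprocess.check_output(
    ['docker', 'images', '--format', '{{.Repository}}:{{.Tag}}']).decode().splitlines()
if IMAGE not in tags:
    raise RuntimeError('SDK container image not found locally: ' + IMAGE)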

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, chardet, urllib3, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0408001250-afb842c2_eda90cd4-05e2-47f5-9dd0-758739206ab1 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 36s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/yp4chgr3m226o

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1112

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1112/display/redirect?page=changes>

Changes:

[robertwb] Update go protos.

[robertwb] [BEAM-9618] Pull bundle descriptors for Go.


------------------------------------------
[...truncated 62.96 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, chardet, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407235512-bbe9e5f0_b2755602-6b68-4993-8fb8-9b1456f8f522 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi
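
The execute-the-jar step above only runs when jar creation succeeded; in these builds it is skipped because TEST_EXIT_CODE is already non-zero. A minimal Python equivalent of that guard and invocation (a sketch; the path is a placeholder) is:

import subprocess

# Sketch: run the generated executable jar only if the creation step succeeded,
# propagating the exit code the same way the shell script does.
def run_executable_jar(jar_path, creation_exit_code):
    if creation_exit_code != 0:
        return creation_exit_code
    return subprocess.call(['java', '-jar', jar_path])

# Example (placeholder path):
# exit_code = run_executable_jar('/tmp/spark-test.jar', exit_code)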

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 54s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/4bcx2vrgccoae

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1111

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1111/display/redirect?page=changes>

Changes:

[pabloem] [BEAM-9691] Ensuring BQSource is avoided on FnApi


------------------------------------------
[...truncated 63.22 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, urllib3, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407231350-870c75ef_80c4e41b-e7a7-4dd9-9b45-0ee83f0b51a5 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 39s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/2mfqfdymbouaw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1110

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1110/display/redirect?page=changes>

Changes:

[ankurgoenka] [BEAM-9707] Hardcode Unified harness image for fixing dataflow VR 2

[github] Fix some Go SDK linter/vet warnings. (#11330)


------------------------------------------
[...truncated 62.98 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, chardet, urllib3, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8
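
The `pip install --retries 10 -e $PYTHON_ROOT_DIR` step above installs the SDK in editable (development) mode, so the virtualenv's apache_beam package points at the workspace checkout rather than a released wheel. A minimal sketch of checking that from inside the activated virtualenv; the expected version comes from the "apache-beam==2.21.0.dev0" requirement in the log, while the exact install path is only an assumption:

# Sanity check that apache_beam resolves to the editable workspace checkout.
import apache_beam
print(apache_beam.__version__)  # expected: 2.21.0.dev0
print(apache_beam.__file__)     # expected to point under .../ws/src/sdks/python/apache_beam/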

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407224831-f044b9da_612ad4d3-b9d4-462c-a71b-5f111f0c8a40 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE
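
Note that the harness captures the exit code of the jar-creation step (|| TEST_EXIT_CODE=$?) instead of failing fast, so the virtualenv and the generated jar are always cleaned up before the script exits. A rough Python sketch of the same create / run / clean-up flow, mirroring the flags used above; the job server jar path and the pipeline file are assumed placeholders, not values from this build:

# Sketch of the create-jar / run-jar / always-clean-up pattern used by the harness above.
import datetime
import os
import subprocess
import sys

PIPELINE_PY = open('pipeline.py').read()              # assumed: the pipeline source above, saved locally
JOB_SERVER_JAR = 'beam-runners-spark-job-server.jar'  # assumed placeholder path
CONTAINER_IMAGE = 'apache/beam_python3.7_sdk:2.21.0.dev'
OUTPUT_JAR = 'flink-test-%s.jar' % datetime.datetime.now().strftime('%Y%m%d-%H%M%S')

exit_code = 0
try:
    # Create the jar; don't fail fast, remember the exit code instead.
    exit_code = subprocess.call([
        sys.executable, '-c', PIPELINE_PY,
        '--runner', 'SparkRunner',
        '--spark_job_server_jar', JOB_SERVER_JAR,
        '--output_executable_path', OUTPUT_JAR,
        '--environment_type', 'DOCKER',
        '--environment_config', CONTAINER_IMAGE,
    ])
    if exit_code == 0:
        # Execute the jar only if creation succeeded.
        exit_code = subprocess.call(['java', '-jar', OUTPUT_JAR])
finally:
    # Clean up before exiting, like `rm -f $OUTPUT_JAR` above.
    if os.path.exists(OUTPUT_JAR):
        os.remove(OUTPUT_JAR)

print('>>> SUCCESS' if exit_code == 0 else '>>> FAILURE')
sys.exit(exit_code)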

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 51s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/tszovgxyqaj3c

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1109

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1109/display/redirect?page=changes>

Changes:

[github] Merge pull request #11205 [BEAM-9578] Defer expensive artifact


------------------------------------------
[...truncated 63.64 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, urllib3, chardet, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407220935-8c20575e_68a6cc87-9760-4148-b3e7-0f118faa5b23 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 16s
65 actionable tasks: 51 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/cp4wfmicgbg2c

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1108

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1108/display/redirect?page=changes>

Changes:

[rohde.samuel] Fix flaky interactive_runner_test


------------------------------------------
[...truncated 62.96 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, idna, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407210332-a7b87731_03467952-d395-4d3d-9c92-dd726163832c failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 51s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ddrnuuuviin3q

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1107

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1107/display/redirect?page=changes>

Changes:

[pabloem] [BEAM-9715] Ensuring annotations_test passes in all


------------------------------------------
[...truncated 62.99 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, chardet, urllib3, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407194509-a9f84121_aff1cbfa-3ec0-4fc3-9fbf-367947746080 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 58s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/6on6qqoiiggxm

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1106

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1106/display/redirect?page=changes>

Changes:

[github] Name the pipeline_v1 proto import

[github] Update materialize_test.go


------------------------------------------
[...truncated 63.42 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, urllib3, idna, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407192734-c9dff87a_db34e9c2-835b-402e-b15d-9a4609e812fb failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 3s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/iyvyh6nqsfd5g

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1105

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1105/display/redirect?page=changes>

Changes:

[github] Merge pull request #11244 from [BEAM-3097] _ReadFromBigQuery supports


------------------------------------------
[...truncated 63.00 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, chardet, certifi, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8
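
"Running setup.py develop for apache-beam" means the SDK was installed in editable mode, so the virtualenv imports the checked-out sdks/python sources directly instead of a copy in site-packages. A quick way to confirm that from the same virtualenv (not part of the Jenkins job) would be:

# Where does apache_beam resolve from? For an editable install, __file__ points
# into the source tree rather than site-packages.
import apache_beam
print(apache_beam.__version__)   # expected: 2.21.0.dev0, matching the requirement above
print(apache_beam.__file__)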

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py>", line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407181141-b961abaf_2eec25fc-e6c1-4098-b159-a06fc997e18e failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 33s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/3tio5dv24fwtm

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1104

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1104/display/redirect?page=changes>

Changes:

[github] Merge pull request #11226: [BEAM-9557] Fix timer window boundary


------------------------------------------
[...truncated 63.20 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, chardet, idna, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py>", line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407171257-1e0ac706_44612e1d-2f81-402d-9f7d-bcaa0ebb8dc0 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 9s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/r64yky5flwj5e

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1103

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1103/display/redirect>

Changes:


------------------------------------------
[...truncated 62.96 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, chardet, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py>", line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407120937-43e9c900_c7a3e0be-30f6-4aa4-9744-54bbd95d3f5e failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 42s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/gpun2nj66qnn6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1102

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1102/display/redirect?page=changes>

Changes:

[echauchot] [BEAM-5980] Change load-tests build to include spark-sql for spark

[echauchot] [BEAM-9436] avoid one flatmap step and a KV creation per element by


------------------------------------------
[...truncated 63.18 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, certifi, idna, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py>", line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407082500-e2004a55_5b56a2d2-5792-4ca0-89ac-d82629159970 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 22s
65 actionable tasks: 51 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/fs4ajolkwr7mm

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1101

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1101/display/redirect?page=changes>

Changes:

[ecapoccia] [BEAM-9434] Improve Spark runner reshuffle translation to maximize


------------------------------------------
[...truncated 63.66 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, idna, urllib3, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py>", line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407071405-6a35715c_959341d1-3d15-42e9-bb42-4eba07ec9e0a failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 18s
65 actionable tasks: 51 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/z3giuhq55dliw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1100

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1100/display/redirect?page=changes>

Changes:

[alex] Add Beam Schema Options to changelog

[alex] [BEAM-9704] Deprecate FieldType metadata


------------------------------------------
[...truncated 63.23 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, idna, chardet, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407063632-bda464b7_47a0ee56-9bff-42e1-9eb3-a78ed710762c failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 26s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/os4ct3pzew62c

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1099

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1099/display/redirect>

Changes:


------------------------------------------
[...truncated 62.98 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, urllib3, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407061150-56b62dca_e682a0bc-db46-448c-8804-8b292121dba3 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 8s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/y4tvs5ctd2vcy

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1098

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1098/display/redirect?page=changes>

Changes:

[eekkaaadrian] [BEAM-9705] Go sdk add value length validation checking on write to


------------------------------------------
[...truncated 63.22 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, idna, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407054305-dae1acb0_f8d30b21-7588-4097-a158-4808e9f85aee failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 55s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/idcjtuvbupllw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1097

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1097/display/redirect?page=changes>

Changes:

[lcwik] [BEAM-4374, BEAM-6189] Delete and remove deprecated Metrics proto

[github] [BEAM-9685] remove Go SDK container from release process (#11308)


------------------------------------------
[...truncated 64.35 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, chardet, urllib3, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407011015-70afc819_2257da36-9393-4afa-8e29-e671ecfacd82 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 52s
65 actionable tasks: 54 executed, 10 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/3r5br3rtnjfna

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1096

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1096/display/redirect?page=changes>

Changes:

[kcweaver] [BEAM-9199] Require Dataflow --region in Python SDK.

[kcweaver] Add --region to tests where needed.

[kcweaver] [BEAM-9199] Require --region option for Dataflow in Java SDK.

[kcweaver] Add --region to Java GCP tests.

[kcweaver] Fix DataflowRunnerTest.

[kcweaver] Fix more Java unit tests missing --region.

[kcweaver] Add --region to DF streaming example tests.

[kcweaver] Add unit tests for get_default_gcp_region

[kcweaver] Add --region to Dataflow runner webpage.

[kcweaver] lint

[kcweaver] Add --region to more Java tests and examples.

[kcweaver] Add --region to more Python tests and examples.

[kcweaver] format

[kcweaver] Remove unrecognized --region option from non-DF tests.


------------------------------------------
[...truncated 63.01 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, idna, certifi, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407004642-6db58b82_cce5318e-765a-40f2-ab20-1254b70dca87 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 55s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/xvk4hsrfxd5m4

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1095

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1095/display/redirect>

Changes:


------------------------------------------
[...truncated 63.16 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, urllib3, idna, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0407001339-e46003f2_76d04cf4-0aa3-4c0a-8076-b11fdd081900 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 18s
65 actionable tasks: 51 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/wjbkty4dujzne

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1094

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1094/display/redirect?page=changes>

Changes:

[robertwb] [BEAM-9618] Add protocol for requesting process bundle descriptors.

[robertwb] [BEAM-9618] Update Python to support process bundle descriptor fetching.

[robertwb] [BEAM-9618] Java FnApiClient support for process bundle descriptor

[robertwb] Typo fix.


------------------------------------------
[...truncated 62.98 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, urllib3, certifi, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0406220803-7a13b186_eb8fecc7-67df-4f3f-8d51-94797579b5ad failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 30s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/wchqg3mvs3cwe

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1093

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1093/display/redirect?page=changes>

Changes:

[github] remove nose (#11307)


------------------------------------------
[...truncated 63.38 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, idna, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0406195510-da8f795b_f89d77a3-fbb1-4312-b6e9-b19bcc7cf749 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 56s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/vvkuaugzyysvg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1092

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1092/display/redirect?page=changes>

Changes:

[github] [BEAM-8019] Python SDK support for cross-language pipelines in Dataflow.


------------------------------------------
[...truncated 63.43 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, urllib3, idna, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0406185216-f6f76362_7ec75a65-66ab-4c4f-859f-81b9665fa258 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 16s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/odu6l7u5jdvfk

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1091

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1091/display/redirect?page=changes>

Changes:

[valentyn] Fixes platform-dependent assumptions in subprocess_server_test.py.

[valentyn] Switches a test helper to a Py3-version thereof.


------------------------------------------
[...truncated 63.01 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, urllib3, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0406181147-93a8a4d6_cb46b014-ec0b-4115-b1ce-923bc1e0f7c8 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE
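
When jar creation fails as it does above, the output jar is typically never
written, and the execute step is skipped only because TEST_EXIT_CODE is
non-zero. A slightly more defensive sketch of that step (same variable names
as above, not the script actually run here) would also check for the file:

if [[ "$TEST_EXIT_CODE" -eq 0 && -f "$OUTPUT_JAR" ]]; then
  # Execute the jar only when creation succeeded and the file exists
  java -jar "$OUTPUT_JAR" || TEST_EXIT_CODE=$?
fi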

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org
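
Following the hint above, the single failing task can be reproduced locally
from a Beam source checkout with extra logging (a sketch; the task name is
taken from the failure message):

./gradlew :runners:spark:job-server:testJavaJarCreatorPy37 --stacktrace --info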

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 43s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/tld4hkcxxymoc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1090

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1090/display/redirect>

Changes:


------------------------------------------
[...truncated 63.41 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0406121122-f813781a_1b7dfd45-4d35-4c2b-a507-a6cf24cbaadd failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 13s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/wed46pf5wbwhs

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1089

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1089/display/redirect?page=changes>

Changes:

[iemejia] [website] Update information about Beam's LTS policy


------------------------------------------
[...truncated 63.43 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, chardet, idna, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0406103118-78ef8797_3f1ff4df-5a43-4686-a729-139c385acf14 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 42s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/fitbwl65h4un2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1088

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1088/display/redirect?page=changes>

Changes:

[mxm] [BEAM-8201] Cleanup FnServices from DockerEnvironmentFactory and


------------------------------------------
[...truncated 63.44 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, urllib3, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0406091017-65a3e3f9_c7ba0a11-e6ab-4f16-be1a-7a1b4b4c9160 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 42s
65 actionable tasks: 51 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/wq5n43u4sneke

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1087

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1087/display/redirect?page=changes>

Changes:

[kcweaver] [BEAM-9509] Improve error message for bad job server URL.


------------------------------------------
[...truncated 63.64 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, chardet, urllib3, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0406090110-a55b2594_a5e7ae27-ed7a-4103-938a-ad1228d1eb22 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 11m 42s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/f3zdckozqkvw4

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1086

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1086/display/redirect>

Changes:


------------------------------------------
[...truncated 63.40 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, certifi, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0406060837-7fa60094_d355ac3b-ff9b-41d8-9f75-72f0fda6eb1a failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
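
The IllegalArgumentException above is raised by Beam's Java job-server code. As a hedged illustration only (illustrative Python with a hypothetical function name and paths, not the actual Java check), the invariant the message describes is that the launching classloader should expose exactly one jar:

# Sketch of the invariant behind "Expected exactly one jar": of the classpath
# entries visible to the launching classloader, exactly one must be a jar,
# otherwise the submission is rejected.
def expect_exactly_one_jar(classpath_entries):
    jars = [entry for entry in classpath_entries if entry.endswith(".jar")]
    if len(jars) != 1:
        raise ValueError(
            "Expected exactly one jar, found %d: %r" % (len(jars), jars))
    return jars[0]

print(expect_exactly_one_jar(["/opt/job-server/beam-job-server.jar"]))  # accepted
# An empty or multi-jar classpath would be rejected with the error seen above.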

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR
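
The script intentionally captures the exit code instead of failing fast so that the environment and jar are cleaned up before exiting. A rough Python equivalent of that pattern is sketched below; the file names are hypothetical and this is not part of the test harness.

# Rough equivalent of "capture the exit code, clean up, then exit": the jar
# execution may fail, but the temporary environment and output jar are removed
# either way, and the original exit code is still propagated.
import os
import shutil
import subprocess
import sys

output_jar = "flink-test-example.jar"   # hypothetical
env_dir = "gradleenv-example"           # hypothetical

try:
    test_exit_code = subprocess.call(["java", "-jar", output_jar])
except OSError:
    test_exit_code = 1                  # java not found / not runnable
finally:
    shutil.rmtree(env_dir, ignore_errors=True)
    if os.path.exists(output_jar):
        os.remove(output_jar)

sys.exit(test_exit_code)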

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
>>> FAILURE
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 19s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ukcctgqtfied2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1085

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1085/display/redirect>

Changes:


------------------------------------------
[...truncated 63.61 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Downloading pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Downloading certifi-2020.4.5.1-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, chardet, certifi, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.1 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0406000852-9bd2e452_83b2fda0-8532-41ce-b5a9-199eb4c42ee7 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 32s
65 actionable tasks: 51 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/mkau3zvl3m5v2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1084

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1084/display/redirect>

Changes:


------------------------------------------
[...truncated 63.40 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, certifi, idna, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0405120945-e595a94c_e9dead84-2375-45ac-937b-2dd22c9cac49 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
>>> FAILURE
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 24s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/idbhe6rdvnvie

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1083

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1083/display/redirect>

Changes:


------------------------------------------
[...truncated 62.95 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, chardet, urllib3, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0405060832-a60453db_83e49c52-8401-451a-85a8-e4a72c1ba70a failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 42s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/glbh4fo7jowog

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1082

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1082/display/redirect?page=changes>

Changes:

[robertwb] ResolveArtifact -> ResolveArtifacts

[robertwb] Regenerate protos.


------------------------------------------
[...truncated 521.58 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, urllib3, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0405045552-5cb69734_7f68c55b-feb9-4605-ac8e-a6ed0a8ea0c6 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 54s
65 actionable tasks: 61 executed, 3 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/tk3yxisvvmtiu

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1081

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1081/display/redirect?page=changes>

Changes:

[github] Merge pull request #11259: Use attachValues in SQL


------------------------------------------
[...truncated 62.92 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0405043235-504f3bc1_087c0042-2201-422c-9ed7-377c7bbc2664 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
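# Hypothetical diagnostic, not part of the original script: the
# IllegalArgumentException above suggests the job server's application classloader
# did not see exactly one jar. A simple first check (reusing the variables above)
# is that the --spark_job_server_jar argument resolves to a single existing shadow
# jar rather than a directory or an unexpanded glob.
ls -l "$JOB_SERVER_JAR"
unzip -l "$JOB_SERVER_JAR" | tail -n 1   # a fat jar should report many entries here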

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE
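# Aside (hypothetical sketch, not part of the Beam test script): the
# `|| TEST_EXIT_CODE=$?` guard plus the explicit rm/exit lines above implement a
# "record the failure, clean up, then exit" pattern. The same idea can be written
# with an EXIT trap so cleanup also runs if the script is interrupted; the names
# below are placeholders, not the script's own.
set -u
TEST_EXIT_CODE=0
WORK_DIR=$(mktemp -d)            # stands in for ENV_DIR and OUTPUT_JAR above
cleanup() { rm -rf "$WORK_DIR"; }
trap cleanup EXIT
false || TEST_EXIT_CODE=$?       # stand-in for the pipeline invocation; don't fail fast
exit $TEST_EXIT_CODE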

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 46s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/fvxry5tgeafce

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1080

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1080/display/redirect>

Changes:


------------------------------------------
[...truncated 63.40 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, chardet, idna, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0405000842-1774dc0_0cd7e5a4-1b45-49bd-9e79-b1211d999b4f failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 24s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/duvskq36v4a2i

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1079

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1079/display/redirect>

Changes:


------------------------------------------
[...truncated 63.63 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, chardet, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0404180906-8a06ca9e_b3ba63e2-a98f-4f5a-b7ca-e96cb7d3e9ec failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
>>> FAILURE
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 46s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/g5b4gmeqis5zo

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1078

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1078/display/redirect>

Changes:


------------------------------------------
[...truncated 63.40 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, chardet, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0404120926-18673c8d_9e9fdb95-c8d1-46c9-908a-ff4f9b936af0 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 8s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/yia7j2rczvywa

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1077

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1077/display/redirect>

Changes:


------------------------------------------
[...truncated 63.40 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, certifi, urllib3, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0404060848-42a72e3f_32612200-562b-42d4-b4ec-31432a92bcbd failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 29s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/74mtusdh35wac

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1076

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1076/display/redirect?page=changes>

Changes:

[aldaircr] Change: Fixing typos on javadoc


------------------------------------------
[...truncated 63.20 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, certifi, idna, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0404022808-c2d5028_8cf61a3c-994a-48ad-b762-857e87806138 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
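The submission above fails before the pipeline runs: the job server side reports that it expected exactly one jar on the application class loader while preparing the output jar. On the Python side, the flags passed to the command are ordinary pipeline options, and viewing them through PortableOptions shows which ones the portable submission path consumes (--parallelism is a Flink-side option, which is consistent with the "Discarding unparseable args" warning above). A minimal sketch with hypothetical placeholder paths, not the exact values used by this job:

# Hypothetical sketch; the jar paths below are placeholders, not this job's real paths.
from apache_beam.options.pipeline_options import PipelineOptions, PortableOptions

options = PipelineOptions([
    '--spark_job_server_jar=/path/to/beam-runners-spark-job-server.jar',  # placeholder
    '--output_executable_path=/tmp/spark-test.jar',                       # placeholder
    '--sdk_worker_parallelism=1',
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
])
portable = options.view_as(PortableOptions)
print(portable.environment_type)        # DOCKER
print(portable.environment_config)      # SDK container image
print(portable.output_executable_path)  # where the runnable jar would be written
print(portable.sdk_worker_parallelism)  # 1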

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
>>> FAILURE
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 26s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/em7as3sv6qrza

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1075

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1075/display/redirect>

Changes:


------------------------------------------
[...truncated 63.43 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, chardet, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0404001931-cdfae8f4_44335930-5e6d-483a-b764-25fc3e7f8a8d failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 21s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/vjmty4hbgjnoc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1074

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1074/display/redirect?page=changes>

Changes:

[robertwb] [BEAM-9577] Update container boot code to stage from dependencies, if

[github] Apply suggestions from code review

[robertwb] Use pointer recievers.


------------------------------------------
[...truncated 63.45 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0403230812-260eb4fa_bb5c2161-5e59-42ee-b17c-6e08bac277d6 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 12s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ydowkyagw3rng

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1073

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1073/display/redirect?page=changes>

Changes:

[rohde.samuel] Change delimeter to a dash as it is a reserved symbol in Windows


------------------------------------------
[...truncated 63.17 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, chardet, idna, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0403221512-15f97436_ee559e46-bf6a-4572-8f14-5b60c4397880 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 39s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/2tk4kn6bsdue6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1072

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1072/display/redirect>

Changes:


------------------------------------------
[...truncated 63.18 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Downloading grpcio-1.28.1-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, chardet, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.1 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0403182725-ba745cee_7237fa22-a931-4c64-8746-e5d5240a634f failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 37s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/uyyk54d2d36rc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1071

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1071/display/redirect?page=changes>

Changes:

[boyuanz] Populate source data from SDF


------------------------------------------
[...truncated 63.42 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, chardet, idna, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0403172234-3e2604f4_03b41889-7860-4779-a102-8681c109660f failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE
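
Each run that gets as far as submitting fails at the jar-creation step with "Expected exactly one jar on sun.misc.Launcher$AppClassLoader": going by the message, the creator inspects the job-server JVM's application classloader and refuses to continue unless it finds a single jar there. A hypothetical pre-flight check on the jar passed via --spark_job_server_jar, just to rule out an obviously malformed argument (the helper name and checks below are illustrative, not Beam's own validation):

# Hypothetical sanity check for the jar handed to --spark_job_server_jar
# (or --flink_job_server_jar); illustrative only, not Beam code.
import os
import sys
import zipfile

def check_job_server_jar(path):
    entries = path.split(os.pathsep)
    if len(entries) != 1:
        sys.exit('expected exactly one jar, got %d entries: %r' % (len(entries), entries))
    jar = entries[0]
    if not os.path.isfile(jar):
        sys.exit('no such file: %s' % jar)
    if not zipfile.is_zipfile(jar):
        sys.exit('not a readable jar/zip: %s' % jar)
    with zipfile.ZipFile(jar) as zf:
        if 'META-INF/MANIFEST.MF' not in zf.namelist():
            sys.exit('jar has no manifest: %s' % jar)
    print('job server jar looks plausible:', jar)

if __name__ == '__main__':
    check_job_server_jar(sys.argv[1])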

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 0s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/rhzqbhf3q2nly

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1070

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1070/display/redirect?page=changes>

Changes:

[boyuanz] Update Timer encoding


------------------------------------------
[...truncated 63.89 KB...]
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:root:Waiting for grpc channel to be ready at localhost:46633.
WARNING:root:Waiting for grpc channel to be ready at localhost:46633.
WARNING:root:Waiting for grpc channel to be ready at localhost:46633.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0403165533-bf92ce2b_a3b53aa0-b56d-4d15-ab1f-bc98f384c317 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR
>>> FAILURE

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
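
This run also logged "Waiting for grpc channel to be ready at localhost:46633" three times before failing, i.e. the runner polled the job service endpoint of the freshly started job server before it could submit. A small sketch of the same kind of readiness probe using grpcio directly (localhost:46633 is only the port this particular run happened to pick):

# Sketch of a gRPC readiness probe like the one behind the warnings above.
import grpc

channel = grpc.insecure_channel('localhost:46633')
try:
    grpc.channel_ready_future(channel).result(timeout=60)
    print('job service endpoint is ready')
except grpc.FutureTimeoutError:
    print('gave up waiting for the job service endpoint')
finally:
    channel.close()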

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 11m 50s
65 actionable tasks: 52 executed, 12 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/fw4b3iyd5xy5i

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1069

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1069/display/redirect?page=changes>

Changes:

[iemejia] [BEAM-9686] Get default TmpCheckpointDir value from PipelineOptions


------------------------------------------
[...truncated 32.36 KB...]
Resolving cached github.com/coreos/etcd: commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', urls=[https://github.com/coreos/etcd.git, git@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', urls=[https://github.com/coreos/etcd.git, git@github.com:coreos/etcd.git]
Resolving github.com/spf13/afero: commit='bb8f1927f2a9d3ab41c9340aa034f6b803f4359c', urls=[https://github.com/spf13/afero.git, git@github.com:spf13/afero.git]
Resolving github.com/spf13/cast: commit='acbeb36b902d72a7a4c18e8f3241075e7ab763e4', urls=[https://github.com/spf13/cast.git, git@github.com:spf13/cast.git]
Resolving github.com/spf13/cobra: commit='93959269ad99e80983c9ba742a7e01203a4c0e4f', urls=[https://github.com/spf13/cobra.git, git@github.com:spf13/cobra.git]
Resolving github.com/spf13/jwalterweatherman: commit='7c0cea34c8ece3fbeb2b27ab9b59511d360fb394', urls=[https://github.com/spf13/jwalterweatherman.git, git@github.com:spf13/jwalterweatherman.git]
Resolving cached github.com/coreos/etcd: commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', urls=[https://github.com/coreos/etcd.git, git@github.com:coreos/etcd.git]
Resolving github.com/spf13/viper: commit='aafc9e6bc7b7bb53ddaa75a5ef49a17d6e654be5', urls=[https://github.com/spf13/viper.git, git@github.com:spf13/viper.git]
Resolving github.com/stathat/go: commit='74669b9f388d9d788c97399a0824adbfee78400e', urls=[https://github.com/stathat/go.git, git@github.com:stathat/go.git]
Resolving cached github.com/coreos/etcd: commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', urls=[https://github.com/coreos/etcd.git, git@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', urls=[https://github.com/coreos/etcd.git, git@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', urls=[https://github.com/coreos/etcd.git, git@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', urls=[https://github.com/coreos/etcd.git, git@github.com:coreos/etcd.git]
Resolving github.com/xordataexchange/crypt: commit='b2862e3d0a775f18c7cfe02273500ae307b61218', urls=[https://github.com/xordataexchange/crypt.git, git@github.com:xordataexchange/crypt.git]
Resolving go.opencensus.io: commit='aa2b39d1618ef56ba156f27cfcdae9042f68f0bc', urls=[https://github.com/census-instrumentation/opencensus-go]
Resolving golang.org/x/crypto: commit='d9133f5469342136e669e85192a26056b587f503', urls=[https://go.googlesource.com/crypto]

> Task :sdks:java:extensions:google-cloud-platform-core:compileJava FROM-CACHE
> Task :sdks:java:extensions:google-cloud-platform-core:classes UP-TO-DATE
> Task :sdks:java:extensions:google-cloud-platform-core:jar
> Task :vendor:sdks-java-extensions-protobuf:compileJava FROM-CACHE
> Task :vendor:sdks-java-extensions-protobuf:classes UP-TO-DATE
> Task :runners:core-construction-java:compileJava FROM-CACHE
> Task :runners:core-construction-java:classes UP-TO-DATE
> Task :sdks:java:fn-execution:compileJava FROM-CACHE
> Task :sdks:java:fn-execution:classes UP-TO-DATE
> Task :sdks:java:fn-execution:jar
> Task :vendor:sdks-java-extensions-protobuf:shadowJar
> Task :runners:core-construction-java:jar
> Task :sdks:java:expansion-service:compileJava FROM-CACHE
> Task :sdks:java:expansion-service:classes UP-TO-DATE
> Task :sdks:java:expansion-service:jar
> Task :runners:core-java:compileJava FROM-CACHE
> Task :runners:core-java:classes UP-TO-DATE
> Task :runners:core-java:jar
> Task :sdks:java:harness:compileJava FROM-CACHE
> Task :sdks:java:harness:classes UP-TO-DATE
> Task :sdks:java:harness:jar

> Task :sdks:python:setupVirtualenv
Installing collected packages: six, filelock, distlib, scandir, pathlib2, singledispatch, contextlib2, zipp, configparser, importlib-metadata, typing, importlib-resources, appdirs, virtualenv, pluggy, py, toml, tox, enum34, futures, grpcio, protobuf, grpcio-tools, future, mypy-protobuf
Successfully installed appdirs-1.4.3 configparser-4.0.2 contextlib2-0.6.0.post1 distlib-0.3.0 enum34-1.1.10 filelock-3.0.12 future-0.16.0 futures-3.3.0 grpcio-1.27.2 grpcio-tools-1.14.2 importlib-metadata-1.6.0 importlib-resources-1.4.0 mypy-protobuf-1.18 pathlib2-2.3.5 pluggy-0.13.1 protobuf-3.11.3 py-1.8.1 scandir-1.10.0 singledispatch-3.4.0.3 six-1.14.0 toml-0.10.0 tox-3.11.1 typing-3.7.4.1 virtualenv-20.0.15 zipp-1.2.0

> Task :sdks:go:resolveBuildDependencies
Resolving golang.org/x/debug: commit='95515998a8a4bd7448134b2cb5971dbeb12e0b77', urls=[https://go.googlesource.com/debug]
Resolving golang.org/x/net: commit='2fb46b16b8dda405028c50f7c7f0f9dd1fa6bfb1', urls=[https://go.googlesource.com/net]
Resolving golang.org/x/oauth2: commit='a032972e28060ca4f5644acffae3dfc268cc09db', urls=[https://go.googlesource.com/oauth2]
Resolving golang.org/x/sync: commit='fd80eb99c8f653c847d294a001bdf2a3a6f768f5', urls=[https://go.googlesource.com/sync]
Resolving golang.org/x/sys: commit='37707fdb30a5b38865cfb95e5aab41707daec7fd', urls=[https://go.googlesource.com/sys]
Resolving cached github.com/coreos/etcd: commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', urls=[https://github.com/coreos/etcd.git, git@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', urls=[https://github.com/coreos/etcd.git, git@github.com:coreos/etcd.git]

> Task :sdks:java:harness:shadowJar

> Task :sdks:python:sdist
setup.py:251: UserWarning: You are using Apache Beam with Python 2. New releases of Apache Beam will soon support Python 3 only.
  'You are using Apache Beam with Python 2. '
<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/1922375555/local/lib/python2.7/site-packages/setuptools/dist.py>:476: UserWarning: Normalizing '2.21.0.dev' to '2.21.0.dev0'
  normalized_version,
INFO:gen_protos:Regenerating Python proto definitions (no output files).
INFO:gen_protos:Found protoc_gen_mypy at <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/1922375555/bin/protoc-gen-mypy>
metrics.proto: warning: Import google/protobuf/timestamp.proto but not used.
beam_fn_api.proto: warning: Import google/protobuf/descriptor.proto but not used.
beam_fn_api.proto: warning: Import google/protobuf/wrappers.proto but not used.
beam_interactive_api.proto: warning: Import google/protobuf/timestamp.proto but not used.
Writing mypy to endpoints_pb2.pyi
Writing mypy to external_transforms_pb2.pyi
Writing mypy to beam_provision_api_pb2.pyi
Writing mypy to beam_runner_api_pb2.pyi
Writing mypy to standard_window_fns_pb2.pyi
Writing mypy to beam_artifact_api_pb2.pyi
Writing mypy to beam_fn_api_pb2.pyi
Writing mypy to metrics_pb2.pyi
Writing mypy to schema_pb2.pyi
Writing mypy to beam_job_api_pb2.pyi
Writing mypy to beam_interactive_api_pb2.pyi
Writing mypy to beam_expansion_api_pb2.pyi
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
RefactoringTool: Skipping optional fixer: idioms
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
RefactoringTool: Skipping optional fixer: ws_comma
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
root: Generating grammar tables from /usr/lib/python2.7/lib2to3/PatternGrammar.txt
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_artifact_api_pb2.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_artifact_api_pb2_grpc.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_expansion_api_pb2.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_expansion_api_pb2_grpc.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_fn_api_pb2.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_fn_api_pb2_grpc.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_interactive_api_pb2.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_job_api_pb2.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_job_api_pb2_grpc.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_provision_api_pb2.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_provision_api_pb2_grpc.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_runner_api_pb2.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_runner_api_pb2_grpc.py>
RefactoringTool: No changes to <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/endpoints_pb2.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/external_transforms_pb2.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/metrics_pb2.py>
RefactoringTool: No changes to <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/schema_pb2.py>
RefactoringTool: Refactored <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/standard_window_fns_pb2.py>
RefactoringTool: Files that were modified:
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_artifact_api_pb2.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_artifact_api_pb2_grpc.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_expansion_api_pb2.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_expansion_api_pb2_grpc.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_fn_api_pb2.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_fn_api_pb2_grpc.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_interactive_api_pb2.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_job_api_pb2.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_job_api_pb2_grpc.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_provision_api_pb2.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_provision_api_pb2_grpc.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_runner_api_pb2.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_runner_api_pb2_grpc.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/endpoints_pb2.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/external_transforms_pb2.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/metrics_pb2.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/schema_pb2.py>
RefactoringTool: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/standard_window_fns_pb2.py>
INFO:gen_protos:Writing urn stubs: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/metrics_pb2_urns.py>
INFO:gen_protos:Writing urn stubs: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_artifact_api_pb2_urns.py>
INFO:gen_protos:Writing urn stubs: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/standard_window_fns_pb2_urns.py>
INFO:gen_protos:Writing urn stubs: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_fn_api_pb2_urns.py>
INFO:gen_protos:Writing urn stubs: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_job_api_pb2_urns.py>
INFO:gen_protos:Writing urn stubs: <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/portability/api/beam_runner_api_pb2_urns.py>
warning: no files found matching 'README.md'
warning: no files found matching 'NOTICE'
warning: no files found matching 'LICENSE'
warning: sdist: standard file not found: should have one of README, README.rst, README.txt, README.md


> Task :sdks:python:container:py37:copyDockerfileDependencies
> Task :runners:java-fn-execution:compileJava FROM-CACHE
> Task :runners:java-fn-execution:classes UP-TO-DATE
> Task :runners:java-fn-execution:jar

> Task :runners:spark:compileJava
Note: Some input files use or override a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
Note: Some input files use unchecked or unsafe operations.
Note: Recompile with -Xlint:unchecked for details.

> Task :runners:spark:classes
> Task :runners:spark:jar
> Task :runners:spark:job-server:compileJava NO-SOURCE
> Task :runners:spark:job-server:classes UP-TO-DATE
> Task :runners:spark:job-server:shadowJar

> Task :sdks:go:resolveBuildDependencies
Resolving google.golang.org/api: commit='386d4e5f4f92f86e6aec85985761bba4b938a2d5', urls=[https://code.googlesource.com/google-api-go-client]
Resolving google.golang.org/genproto: commit='2b5a72b8730b0b16380010cfe5286c42108d88e7', urls=[https://github.com/google/go-genproto]
Resolving google.golang.org/grpc: commit='7646b5360d049a7ca31e9133315db43456f39e2e', urls=[https://github.com/grpc/grpc-go]
Resolving cached github.com/coreos/etcd: commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', urls=[https://github.com/coreos/etcd.git, git@github.com:coreos/etcd.git]
Resolving cached github.com/coreos/etcd: commit='11214aa33bf5a47d3d9d8dafe0f6b97237dfe921', urls=[https://github.com/coreos/etcd.git, git@github.com:coreos/etcd.git]

> Task :sdks:go:installDependencies
> Task :sdks:go:buildLinuxAmd64
> Task :sdks:go:goBuild

> Task :sdks:python:container:resolveBuildDependencies
Resolving ./github.com/apache/beam/sdks/go@<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/go>

> Task :sdks:python:container:installDependencies
> Task :sdks:python:container:buildDarwinAmd64
> Task :sdks:python:container:buildLinuxAmd64
> Task :sdks:python:container:goBuild
> Task :sdks:python:container:py37:copyLauncherDependencies
> Task :sdks:python:container:py37:dockerPrepare
> Task :sdks:python:container:py37:docker
The command '/bin/sh -c pip install -r /tmp/base_image_requirements.txt &&     python -c "from google.protobuf.internal import api_implementation; assert api_implementation._default_implementation_type == 'cpp'; print ('Verified fast protobuf used.')" &&     rm -rf /root/.cache/pip' returned a non-zero code: 2
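
Unlike the other runs, build #1069 never reached the test script: the py37 container build failed in the RUN step shown above, which pip-installs the base requirements and then asserts that the fast C++ protobuf implementation is active. For debugging by hand, that assertion boils down to roughly the following sketch (using the module's Type() helper rather than the _default_implementation_type attribute the Dockerfile pokes at; run it inside the image):

# Roughly the same protobuf check as in the Dockerfile's RUN step above.
from google.protobuf.internal import api_implementation

impl = api_implementation.Type()  # 'cpp' when the C++ extension is active, else 'python'
print('protobuf implementation:', impl)
assert impl == 'cpp', 'fast (cpp) protobuf is not in use'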

> Task :sdks:python:container:py37:docker FAILED

FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':sdks:python:container:py37:docker'.
> Process 'command 'docker'' finished with non-zero exit value 2

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 2m 33s
64 actionable tasks: 50 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/yszhphxz52lbw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1068

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1068/display/redirect?page=changes>

Changes:

[spoorti] [BEAM-9660]: Add an explicit check for integer overflow.


------------------------------------------
[...truncated 63.63 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev
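
The step above simply greps the docker images listing for the SDK container tag to make sure the image was built before the test starts. The same guard, sketched in Python for reference (the image name is taken from the log; this is not part of the Jenkins script):

# Hypothetical Python equivalent of the image check above.
import subprocess

image = 'apache/beam_python3.7_sdk:2.21.0.dev'
tags = subprocess.run(
    ['docker', 'images', '--format', '{{.Repository}}:{{.Tag}}'],
    capture_output=True, text=True, check=True).stdout.splitlines()
if image not in tags:
    raise SystemExit('SDK container image not found: ' + image)
print('found', image)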

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0403161516-5a295b80_597a85f2-41a5-4c7a-ac68-dd77fc715edc failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE
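
One detail worth noting in the wrapper above: the submit step never aborts the script. Its exit code is captured, the jar is only executed when that code is zero, cleanup of the virtualenv and the output jar always runs, and the captured code is re-raised at the very end (the jar also keeps a flink-test-* name even on the Spark runner, since the script hard-codes that prefix). The same shape in Python, with placeholder commands standing in for the real submit and execute steps:

# Don't-fail-fast / always-clean-up pattern from the wrapper above, sketched in
# Python; both commands below are placeholders, not the real Jenkins invocations.
import contextlib
import os
import shutil
import subprocess
import sys

exit_code = 0
try:
    exit_code = subprocess.call([sys.executable, '-c', 'print("submit pipeline here")'])
    if exit_code == 0:
        exit_code = subprocess.call(['java', '-jar', 'output.jar'])  # placeholder jar
finally:
    shutil.rmtree('env_dir', ignore_errors=True)       # rm -rf $ENV_DIR
    with contextlib.suppress(FileNotFoundError):
        os.remove('output.jar')                        # rm -f $OUTPUT_JAR
    print('>>> SUCCESS' if exit_code == 0 else '>>> FAILURE')
sys.exit(exit_code)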

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 12m 53s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/5i7zvuhvmspg6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1067

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1067/display/redirect?page=changes>

Changes:

[mxm] [BEAM-9645] Fix premature removal of Docker container and logs


------------------------------------------
[...truncated 63.41 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi
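
# The flags passed on the command line below can equally be assembled as PipelineOptions
# in Python. A sketch with placeholder jar paths, assuming $RUNNER is SparkRunner here
# (the real values come from $JOB_SERVER_JAR and the generated $OUTPUT_JAR; note the
# WARNING further down shows --parallelism is not recognized on the Spark path):
from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    '--runner=SparkRunner',
    '--spark_job_server_jar=/path/to/job-server.jar',   # placeholder path
    '--output_executable_path=/tmp/spark-test.jar',     # placeholder path
    '--sdk_worker_parallelism=1',
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
])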

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0403130944-62247562_b83f5ba6-f2af-470a-90c9-b14a76c39871 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
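
# The traceback above is wait_until_finish() raising RuntimeError once the job reaches
# FAILED. A minimal sketch, using the same names as PIPELINE_PY, that surfaces the
# job-server message while keeping control of the exit code:
try:
    result = pipeline.run()
    result.wait_until_finish()
except RuntimeError as exc:
    print('pipeline failed: %s' % exc)
    raise SystemExit(1)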

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi
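
# When the creation step succeeds, the java -jar step above is what actually submits the
# packaged pipeline; driven from Python it would look roughly like this (placeholder jar
# name standing in for the timestamped $OUTPUT_JAR):
import subprocess
completed = subprocess.run(['java', '-jar', 'flink-test-YYYYMMDD-HHMMSS.jar'])
print('exit code:', completed.returncode)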

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 37s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/5dhipz4b26icq

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1066

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1066/display/redirect>

Changes:


------------------------------------------
[...truncated 63.42 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev
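
# The grep above only confirms the image tag is present locally; an equivalent check
# from Python (a sketch, using the same tag as printed above):
import subprocess
out = subprocess.run(
    ['docker', 'images', '--format', '{{.Repository}}:{{.Tag}}'],
    capture_output=True, text=True, check=True).stdout
assert 'apache/beam_python3.7_sdk:2.21.0.dev' in out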

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0403122143-7deae8d0_2267ee5f-e9af-407b-8555-3040cdd26a2e failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 14m 46s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/q4c2fah7swtwo

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1065

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1065/display/redirect?page=changes>

Changes:

[alex] [BEAM-9044] Protobuf options to Schema options


------------------------------------------
[...truncated 63.43 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, urllib3, idna, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0403115411-9b8f7d09_bb605b07-ecb2-4c15-9904-cc8d59311005 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 40s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/wifynecx62lfs

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1064

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1064/display/redirect?page=changes>

Changes:

[ameihm] [BEAM-9476] KinesisIO retry LimitExceededException


------------------------------------------
[...truncated 62.98 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, chardet, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0403094252-a1b1396e_8ed8a25b-a074-4853-9c0d-d3d9ecbad4aa failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 42s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/7vlryrhk7pabk

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1063

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1063/display/redirect>

Changes:


------------------------------------------
[...truncated 63.37 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, urllib3, idna, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0403060814-3180ccb6_df09d9e9-6324-46b6-85a2-c3aff8ef0a4f failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 55s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/jxzvinmal5ep4

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1062

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1062/display/redirect?page=changes>

Changes:

[github] [BEAM-9136]Add licenses for dependencies for Python (#11067)


------------------------------------------
[...truncated 62.98 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, urllib3, chardet, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0403044557-2fa401ea_af6f77fe-e078-4407-acfa-233ba2b8d0ab failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
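The traceback above originates in result.wait_until_finish(): the portable runner raises a RuntimeError carrying the job id, its terminal state, and the job server's last error message, which is how the Java-side IllegalArgumentException ("Expected exactly one jar ...") surfaces inside the Python process. A script that needs to clean up regardless of the outcome can mirror the shell's '|| TEST_EXIT_CODE=$?' pattern in Python; a hedged sketch (the pipeline contents are placeholders, not the real test):

    import sys
    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions

    exit_code = 0
    try:
        # Placeholder pipeline; the real test submits PIPELINE_PY with portable options.
        with beam.Pipeline(options=PipelineOptions()) as p:
            _ = p | beam.Create([1, 2, 3]) | beam.Map(lambda x: x * 2)
    except RuntimeError as exc:
        # wait_until_finish (called on context exit) raises when the job ends in FAILED.
        print('pipeline failed: %s' % exc, file=sys.stderr)
        exit_code = 1
    # ... cleanup (remove temp dirs, jars, etc.) would go here ...
    sys.exit(exit_code)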

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file 'https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 30s
65 actionable tasks: 50 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/a3z23ujbpfsh2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1061

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1061/display/redirect?page=changes>

Changes:

[lcwik] [BEAM-9677] Fix path -> url typo in ArtifactUrlPayload


------------------------------------------
[...truncated 62.93 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, certifi, chardet, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0403021127-ccb098f2_497aba24-ac99-43df-ae25-13def51626d5 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
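The "Discarding unparseable args" warning a few lines above is the Python SDK's standard handling of flags it has no registered pipeline option for: they are dropped with a warning instead of aborting the run, which is why submission continues past --parallelism here. A small local sketch of that behavior, using a deliberately made-up flag so the discard is guaranteed:

    from apache_beam.options.pipeline_options import PipelineOptions

    # '--made_up_flag' is hypothetical and unregistered; converting the options to a
    # dict logs "Discarding unparseable args: ['--made_up_flag=1']" instead of raising.
    opts = PipelineOptions(['--runner=SparkRunner', '--made_up_flag=1'])
    print(opts.get_all_options(drop_default=True))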

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file 'https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 11s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/srym46yovnizk

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1060

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1060/display/redirect?page=changes>

Changes:

[github] Fix minor typo

[github] Fix minor typo


------------------------------------------
[...truncated 63.23 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, urllib3, certifi, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0403003857-eee1ec13_7def0edd-d248-437d-87f3-71c56b857c76 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
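The "Make sure that locally built Python SDK docker image has Python 3.7 interpreter" warning earlier in this run is tied to the --environment_type DOCKER and --environment_config flags: they tell the portable runner to execute SDK workers inside the apache/beam_python3.7_sdk:2.21.0.dev container, and the interpreter in that image needs to be compatible with the submitting interpreter for the pickled main session to load. A short sketch of how those two flags are read back, assuming PortableOptions (the options view that defines them in released Beam SDKs):

    from apache_beam.options.pipeline_options import PipelineOptions
    from apache_beam.options.pipeline_options import PortableOptions

    opts = PipelineOptions([
        '--environment_type=DOCKER',
        '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
    ])
    portable = opts.view_as(PortableOptions)
    # Prints: DOCKER apache/beam_python3.7_sdk:2.21.0.dev
    print(portable.environment_type, portable.environment_config)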

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file 'https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 58s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/smu5o7mufntxo

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1059

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1059/display/redirect?page=changes>

Changes:

[github] [BEAM-4374] Short IDs for the Python SDK (#11286)


------------------------------------------
[...truncated 62.90 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, chardet, urllib3, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0402230746-c2e0edd2_10d02af9-4821-4e16-be25-2c39dca5e898 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
>>> FAILURE
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file 'https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 30s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/7gl7pd3rdmpsm

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1058

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1058/display/redirect?page=changes>

Changes:

[github] Merge pull request #11290: [BEAM-9670] Fix nullability widening in


------------------------------------------
[...truncated 513.83 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, certifi, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0402222601-b2a220a8_53f8d50f-18ef-4bd3-8340-d2ba21c4a867 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file 'https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 55s
64 actionable tasks: 59 executed, 4 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/aupt4nckd2hh6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1057

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1057/display/redirect?page=changes>

Changes:

[robertwb] [BEAM-9577] Add dependency information to provision info.

[robertwb] Update go protos.


------------------------------------------
[...truncated 64.86 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, certifi, idna, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0402213736-5d09845e_6caf0983-1c7c-4f1c-a20f-16a4514e2e0c failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
>>> FAILURE
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 10s
64 actionable tasks: 56 executed, 7 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/epx3dmucdjwp4

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1056

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1056/display/redirect?page=changes>

Changes:

[github] [BEAM-9667] Allow metrics in DoFn Setup (#11287)

[github] [BEAM-9624] Adds Convert to Accumulators operator for use in combiner


------------------------------------------
[...truncated 519.33 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0402212117-88a57b2_de112306-737b-4b4a-a9eb-74c391a620c4 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 2s
64 actionable tasks: 62 executed, 1 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/z5vvaul23u2de

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1055

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1055/display/redirect>

Changes:


------------------------------------------
Started by GitHub push by lostluck
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-3 (beam) in workspace <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/>
No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src> # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/*
ERROR: Error cloning remote repo 'origin'
hudson.plugins.git.GitException: Command "git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/*" returned status code 128:
stdout: 
stderr: fatal: unable to access 'https://github.com/apache/beam.git/': The requested URL returned error: 504

	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:2172)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandWithCredentials(CliGitAPIImpl.java:1864)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.access$500(CliGitAPIImpl.java:78)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl$1.execute(CliGitAPIImpl.java:545)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl$2.execute(CliGitAPIImpl.java:758)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler$1.call(RemoteGitImpl.java:153)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler$1.call(RemoteGitImpl.java:146)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at hudson.remoting.Engine$1.lambda$newThread$0(Engine.java:93)
	at java.lang.Thread.run(Thread.java:748)
	Suppressed: hudson.remoting.Channel$CallSiteStackTrace: Remote call to JNLP4-connect connection from 89.93.66.34.bc.googleusercontent.com/34.66.93.89:39434
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1743)
		at hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
		at hudson.remoting.Channel.call(Channel.java:957)
		at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler.execute(RemoteGitImpl.java:146)
		at sun.reflect.GeneratedMethodAccessor796.invoke(Unknown Source)
		at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
		at java.lang.reflect.Method.invoke(Method.java:498)
		at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler.invoke(RemoteGitImpl.java:132)
		at com.sun.proxy.$Proxy141.execute(Unknown Source)
		at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1152)
		at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1192)
		at hudson.scm.SCM.checkout(SCM.java:504)
		at hudson.model.AbstractProject.checkout(AbstractProject.java:1208)
		at hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
		at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
		at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
		at hudson.model.Run.execute(Run.java:1815)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:429)
ERROR: Error cloning remote repo 'origin'
Retrying after 10 seconds
No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src> # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/*
ERROR: Error cloning remote repo 'origin'
hudson.plugins.git.GitException: Command "git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/*" returned status code 128:
stdout: 
stderr: remote: Internal Server Error.
remote: 
fatal: unable to access 'https://github.com/apache/beam.git/': The requested URL returned error: 500

	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:2172)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandWithCredentials(CliGitAPIImpl.java:1864)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.access$500(CliGitAPIImpl.java:78)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl$1.execute(CliGitAPIImpl.java:545)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl$2.execute(CliGitAPIImpl.java:758)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler$1.call(RemoteGitImpl.java:153)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler$1.call(RemoteGitImpl.java:146)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at hudson.remoting.Engine$1.lambda$newThread$0(Engine.java:93)
	at java.lang.Thread.run(Thread.java:748)
	Suppressed: hudson.remoting.Channel$CallSiteStackTrace: Remote call to JNLP4-connect connection from 89.93.66.34.bc.googleusercontent.com/34.66.93.89:39434
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1743)
		at hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
		at hudson.remoting.Channel.call(Channel.java:957)
		at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler.execute(RemoteGitImpl.java:146)
		at sun.reflect.GeneratedMethodAccessor796.invoke(Unknown Source)
		at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
		at java.lang.reflect.Method.invoke(Method.java:498)
		at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler.invoke(RemoteGitImpl.java:132)
		at com.sun.proxy.$Proxy141.execute(Unknown Source)
		at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1152)
		at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1192)
		at hudson.scm.SCM.checkout(SCM.java:504)
		at hudson.model.AbstractProject.checkout(AbstractProject.java:1208)
		at hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
		at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
		at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
		at hudson.model.Run.execute(Run.java:1815)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:429)
ERROR: Error cloning remote repo 'origin'
Retrying after 10 seconds
No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src> # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
ERROR: Error fetching remote repo 'origin'
hudson.plugins.git.GitException: Failed to fetch from https://github.com/apache/beam.git
	at hudson.plugins.git.GitSCM.fetchFrom(GitSCM.java:894)
	at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1161)
	at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1192)
	at hudson.scm.SCM.checkout(SCM.java:504)
	at hudson.model.AbstractProject.checkout(AbstractProject.java:1208)
	at hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
	at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
	at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
	at hudson.model.Run.execute(Run.java:1815)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
Caused by: hudson.plugins.git.GitException: Command "git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*" returned status code 128:
stdout: 
stderr: fatal: unable to access 'https://github.com/apache/beam.git/': The requested URL returned error: 504

	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:2172)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandWithCredentials(CliGitAPIImpl.java:1864)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.access$500(CliGitAPIImpl.java:78)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl$1.execute(CliGitAPIImpl.java:545)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler$1.call(RemoteGitImpl.java:153)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler$1.call(RemoteGitImpl.java:146)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at hudson.remoting.Engine$1.lambda$newThread$0(Engine.java:93)
	at java.lang.Thread.run(Thread.java:748)
	Suppressed: hudson.remoting.Channel$CallSiteStackTrace: Remote call to JNLP4-connect connection from 89.93.66.34.bc.googleusercontent.com/34.66.93.89:39434
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1743)
		at hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
		at hudson.remoting.Channel.call(Channel.java:957)
		at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler.execute(RemoteGitImpl.java:146)
		at sun.reflect.GeneratedMethodAccessor796.invoke(Unknown Source)
		at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
		at java.lang.reflect.Method.invoke(Method.java:498)
		at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler.invoke(RemoteGitImpl.java:132)
		at com.sun.proxy.$Proxy123.execute(Unknown Source)
		at hudson.plugins.git.GitSCM.fetchFrom(GitSCM.java:892)
		at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1161)
		at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1192)
		at hudson.scm.SCM.checkout(SCM.java:504)
		at hudson.model.AbstractProject.checkout(AbstractProject.java:1208)
		at hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
		at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
		at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
		at hudson.model.Run.execute(Run.java:1815)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:429)
ERROR: Error fetching remote repo 'origin'

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1054

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1054/display/redirect?page=changes>

Changes:

[lcwik] [BEAM-9562] Update missed TimerSpec conversion in Go SDK


------------------------------------------
[...truncated 62.70 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, idna, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0402195420-e4ca1c15_a2ec0455-d079-425e-b46d-ce58571aac2c failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 43s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/46zsnhtjruq24

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1053

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1053/display/redirect?page=changes>

Changes:

[valentyn] Fix a Py2/3 incompatibility in profiler.


------------------------------------------
[...truncated 62.74 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Downloading typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, urllib3, idna, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.2 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0402181455-aa6f3471_f3561266-dadf-4159-b3fc-3deeb8e2868b failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 40s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/oszdsutl4ka2i

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1052

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1052/display/redirect?page=changes>

Changes:

[github] [BEAM-7923] An indicator of progress in notebooks (#11276)


------------------------------------------
[...truncated 63.17 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, urllib3, certifi, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0402171054-5e9c593_95a04286-e6e3-4145-98ff-bc9250acc2d4 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 26s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/bw6uomcqgtlbw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1051

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1051/display/redirect>

Changes:


------------------------------------------
[...truncated 62.69 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, chardet, urllib3, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0402120839-7856136_79771f07-d621-4d29-aceb-ad6a4e676331 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 18s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/f4zkgojnthw3w

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1050

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1050/display/redirect>

Changes:


------------------------------------------
[...truncated 63.15 KB...]
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Collecting future<1.0.0,>=0.16.0
  Downloading future-0.18.2.tar.gz (829 kB)
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.0-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Building wheels for collected packages: future
  Building wheel for future (setup.py): started
  Building wheel for future (setup.py): finished with status 'done'
  Created wheel for future: filename=future-0.18.2-py3-none-any.whl size=491058 sha256=624c415438fd20e22b86490831264de8d86368f0e32d81b8a09091b2efc8dcd1
  Stored in directory: /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0
Successfully built future
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, chardet, idna, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.0 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0402060803-6e575954_affbd1a9-d444-4e54-a04e-e0ec29cd7290 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 16s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/iystrzyzd7cws

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1049

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1049/display/redirect?page=changes>

Changes:

[github] [BEAM-9652] Ensure that the multipartition write sets the correct coder

[github] [BEAM-8889]add experiment flag use_grpc_for_gcs (#11183)


------------------------------------------
[...truncated 63.81 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.0-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, chardet, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.0 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0402040917-bea1ad14_27b4b4c1-d9f6-416e-ab65-df02d97fb39e failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 20s
64 actionable tasks: 53 executed, 10 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/zjkkwqxd4nmn2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1048

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1048/display/redirect?page=changes>

Changes:

[lcwik] [BEAM-9668] Disable tests till Dataflow containers are updated.


------------------------------------------
[...truncated 63.13 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.28.0-cp37-cp37m-manylinux2010_x86_64.whl (2.8 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, chardet, urllib3, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.28.0 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0402035409-cd5bf52d_7e1938dd-d925-49a0-9aae-6411f43768be failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 44s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/wy22lta6jgcx6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1047

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1047/display/redirect?page=changes>

Changes:

[pabloem] Revert "Merge pull request #11104 from y1chi/update_tornado_test"


------------------------------------------
[...truncated 63.24 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, idna, certifi, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0402013525-d16779df_253b5595-0ce4-43c4-a61d-10d62480b00b failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 2s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ghswg3tthav34

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1046

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1046/display/redirect?page=changes>

Changes:

[daniel.o.programmer] [BEAM-9642] Create runtime invokers for SDF methods.


------------------------------------------
[...truncated 62.75 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, idna, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0402002037-5b428554_f20d865a-96f0-49a9-8950-ecca56e38944 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 20s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/wvyfc2ztkk4yu

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1045

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1045/display/redirect?page=changes>

Changes:

[robertwb] [BEAM-9340] Populate requirement for timer families.


------------------------------------------
[...truncated 62.73 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/0d/e7/b6/0dd30343ceca921cfbd91f355041bd9c69e0f40b49f25b7b8a/httplib2-0.12.0-py3-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, urllib3, idna, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0401215721-a7ccfe7a_949b0816-c064-40ab-9e7c-f5c24c574722 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 33s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/uawlfrltx4xn6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1044

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1044/display/redirect?page=changes>

Changes:

[zyichi] Update BigQuery source in bigquery_tornadoes example

[rohde.samuel] Add dependency comment in streaming cache

[ehudm] [BEAM-1894] Remove obsolete EagerRunner test


------------------------------------------
[...truncated 62.49 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, certifi, chardet, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0401182445-4b2cfeaf_6fcb15af-45c8-4540-a2cb-56f547d87ee8 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 19s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/xlfaisssp4ahi

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1043

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1043/display/redirect>

Changes:


------------------------------------------
[...truncated 63.06 KB...]
Already using interpreter /usr/bin/python3.7
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly


if [ "${BASH_SOURCE-}" = "$0" ]; then
    echo "You must source this script: \$ source $0" >&2
    exit 33
fi

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0401121053-136cb625_cd633bc1-31ca-4461-9498-e53a55a848ca failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 34s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ewxqrod7t3vpi

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1042

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1042/display/redirect?page=changes>

Changes:

[jozsi] Update Jet version to 4.0

[jozsi] Update Jet Runner web page with info about 4.0

[jozsi] Add Beam-Jet compatibility table


------------------------------------------
[...truncated 62.98 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, idna, certifi, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8
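(Aside, not part of the Jenkins test script: a quick way to confirm that the editable install above resolved to the expected dev SDK is to import it from the freshly activated virtualenv. The version string comes from the pip output above; the check itself is only illustrative.)

# Hedged check: confirm the develop-mode apache-beam install in this virtualenv.
import apache_beam as beam

# The pip log above installed apache-beam==2.21.0.dev0 in develop mode, so the
# imported version is expected to match that dev version string.
print(beam.__version__)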

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0401081531-abe32e31_2b24371e-bf93-4447-bc4f-644cf15e15fc failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
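(Aside on the traceback above: wait_until_finish() on the portable runner turns a terminal FAILED state into a RuntimeError carrying the job id, the state, and the job server's last error message, which is how the Java IllegalArgumentException from the jar-creation step surfaces in Python. A hedged sketch of that calling pattern, with the DirectRunner substituted for the portable Spark submission:)

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

# Sketch only: on the DirectRunner the except branch normally stays quiet; it
# just marks where the portable runner's RuntimeError would be caught.
p = beam.Pipeline(options=PipelineOptions())
_ = p | beam.Create(['check']) | beam.Map(lambda x: x)
result = p.run()
try:
    result.wait_until_finish()
except RuntimeError as err:
    # On the portable runner this message wraps the job server failure, e.g.
    # the "Expected exactly one jar" IllegalArgumentException logged above.
    print('pipeline failed: %s' % err)
    raise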

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE
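(Aside: had jar creation succeeded, the OUTPUT_JAR written above would be an ordinary zip archive, so its contents can be sanity-checked before the java -jar step. A hedged sketch; the file name is a placeholder in the same flink-test-<timestamp>.jar pattern the script uses, not a file from this build:)

import zipfile

output_jar = 'flink-test-20200401-000000.jar'  # placeholder name, see OUTPUT_JAR above
with zipfile.ZipFile(output_jar) as jar:
    # List a few entries as a quick structural check of the generated jar.
    for name in jar.namelist()[:10]:
        print(name)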

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 10s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/v7tcpuslemhaa

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1041

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1041/display/redirect>

Changes:


------------------------------------------
[...truncated 63.30 KB...]

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, chardet, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:root:Waiting for grpc channel to be ready at localhost:48277.
WARNING:root:Waiting for grpc channel to be ready at localhost:48277.
WARNING:root:Waiting for grpc channel to be ready at localhost:48277.
WARNING:root:Waiting for grpc channel to be ready at localhost:48277.
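(Aside on the repeated warnings above: the portable runner keeps polling the job server endpoint until its gRPC channel becomes usable. A hedged sketch of that kind of readiness probe using grpcio; the address is the per-build localhost port from the log and is not stable across builds:)

import grpc

channel = grpc.insecure_channel('localhost:48277')  # port taken from the warnings above
try:
    # Block until the channel connects, or give up after ten seconds.
    grpc.channel_ready_future(channel).result(timeout=10)
    print('job server channel is ready')
except grpc.FutureTimeoutError:
    print('job server did not become ready in time')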
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0401061322-1ed09fe7_c95c545f-366d-4e9c-9664-2897bd1e238f failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
>>> FAILURE
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 13m 0s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/km7qtweacjkyq

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1040

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1040/display/redirect?page=changes>

Changes:

[zyichi] [BEAM-9263] Bump up python sdk dataflow environment major versions


------------------------------------------
[...truncated 63.16 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, urllib3, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0401035623-67c1e3db_2c95f9da-8e08-4418-b8a2-a58e54482ba5 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 0s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/nor62zaqbrn2c

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1039

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1039/display/redirect?page=changes>

Changes:

[pabloem] [BEAM-9608] Increase reliance on Context Managers for FnApiRunner


------------------------------------------
[...truncated 63.64 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, urllib3, certifi, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0401033045-ed752b2f_66108479-55cd-4b4a-98b8-cb1592656984 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
>>> FAILURE
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 44s
64 actionable tasks: 50 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/hzkdq66u4zaue

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1038

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1038/display/redirect>

Changes:


------------------------------------------
[...truncated 62.97 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, idna, urllib3, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0401004028-624fc66b_15888613-776e-4946-aa1b-eb12a5843304 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 19s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/mghulpqoorxbg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1037

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1037/display/redirect?page=changes>

Changes:

[robertwb] [BEAM-9577] Rename the Artifact{Staging,Retrieval}Service.

[robertwb] [BEAM-9577] Define the new Artifact{Staging,Retrieval}Service.

[robertwb] [BEAM-9577] Regenerate protos.

[robertwb] [BEAM-9577] Implement the new Artifact{Staging,Retrieval}Services in


------------------------------------------
[...truncated 62.99 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, chardet, urllib3, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8
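(A quick sanity check, not part of the captured log: after the editable install above, the dev SDK can be verified from the same virtualenv. The expected version string is an assumption based on the install output above.)

import apache_beam
print(apache_beam.__version__)  # expected to report 2.21.0.dev0 per the install output above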

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0331225938-ed51ffcc_563354c9-20f9-43d9-80c1-1f6f1ae52a63 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
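(Illustrative only: how the failure above surfaces to the Python caller. wait_until_finish() on the portable runner raises RuntimeError once the job reaches the FAILED state; a hedged sketch of catching it follows, with names taken from the Beam Python SDK.)

from apache_beam.runners.runner import PipelineState

result = pipeline.run()
try:
    result.wait_until_finish()
except RuntimeError as err:
    # The portable runner wraps the job server's error message in the exception.
    print('pipeline state:', result.state)  # expected: PipelineState.FAILED
    print('error:', err)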

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 0s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/a6xmncv5fh4oc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1036

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1036/display/redirect?page=changes>

Changes:

[chamikara] Refactors X-Lang test pipelines.


------------------------------------------
[...truncated 63.39 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, certifi, chardet, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0331205408-6a6e9b04_2334d6f3-1e3a-43ec-bdab-a30525fd1b38 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 10s
64 actionable tasks: 50 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/hz5fnh5hsme52

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1035

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1035/display/redirect?page=changes>

Changes:

[kyoungha] [BEAM-9325] Added Proper Write Method in UnownedOutputStream

[kyoungha] fix format warning

[kyoungha] [BEAM-9325] reflect comment : inline testing methods

[kyoungha] [BEAM-9325] reflect comment : Fix JAXBCoder + change test


------------------------------------------
[...truncated 62.72 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, urllib3, idna, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0331203143-97ecada4_7b12055d-6e58-4c68-8557-adab9734eeb3 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 28s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/nreibdqtfblzs

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1034

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1034/display/redirect?page=changes>

Changes:

[samuelw] [BEAM-9399] Change the redirection of System.err to be a custom

[samuelw] Fix missing test import


------------------------------------------
[...truncated 62.56 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, urllib3, idna, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0331201154-5a07ee9c_f82bb783-659d-4d0b-806d-36bebccc9f5e failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 19s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ciqdfcw2rrwz2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1033

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1033/display/redirect>

Changes:


------------------------------------------
[...truncated 63.15 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, chardet, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0331182154-206e1ac0_fd4887ff-3a29-44d2-b6fd-27dff41942a6 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 42s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/xo2pho2cuazbg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1032

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1032/display/redirect?page=changes>

Changes:

[boyuanz] [BEAM-9454] Add Deduplication PTransform


------------------------------------------
[...truncated 62.93 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, urllib3, chardet, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0331165832-47f99568_9c75da6d-053f-48e6-a765-5012d4a9ac94 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
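
The AppClassLoader in the message points at the Java side: the exception is raised inside the job server launched from $JOB_SERVER_JAR, and the Python client at portable_runner.py only relays the terminal FAILED state. The jar-creation path apparently expects exactly one jar on that classloader to use as the basis of the output jar, so a job server started from anything other than a single jar would trip this check. A small, hypothetical client-side pre-flight check (it assumes JOB_SERVER_JAR is exported as an environment variable, which the harness above does not necessarily do) can at least confirm the local input is one well-formed jar:

import os
import sys
import zipfile

job_server_jar = os.environ.get("JOB_SERVER_JAR", "")
# A jar is a zip archive, so is_zipfile() is a cheap sanity check.
if not (os.path.isfile(job_server_jar) and zipfile.is_zipfile(job_server_jar)):
    sys.exit("job server jar missing or not a valid jar: %r" % job_server_jar)
print("job server jar looks usable: %s" % job_server_jar)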

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi
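
The flags passed to the pipeline above do two different jobs: --output_executable_path switches the portable runner from submitting the pipeline into writing a self-contained jar, and the java -jar step just above is what would actually execute it. A sketch with the shell variables replaced by literal values (the literals are assumptions; the real values are injected by the Gradle task that runs this script):

from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    "--runner=SparkRunner",                                       # $RUNNER, presumably SparkRunner on this job
    "--spark_job_server_jar=beam-runners-spark-job-server.jar",   # $JOB_SERVER_JAR (assumed file name)
    "--output_executable_path=flink-test-20200331-000000.jar",    # $OUTPUT_JAR (the harness names it flink-test-* even here)
    "--sdk_worker_parallelism=1",
    "--environment_type=DOCKER",
    "--environment_config=apache/beam_python3.7_sdk:2.21.0.dev",  # $PYTHON_CONTAINER_IMAGE
])
print(options.get_all_options())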

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 45s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/5c7h25qitrrvo

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1031

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1031/display/redirect?page=changes>

Changes:

[kcweaver] [BEAM-9638] Strengthen worker region & zone options tests.


------------------------------------------
[...truncated 63.04 KB...]
Already using interpreter /usr/bin/python3.7
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly


if [ "${BASH_SOURCE-}" = "$0" ]; then
    echo "You must source this script: \$ source $0" >&2
    exit 33
fi

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, idna, certifi, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0331163935-a0ce76ab_29370621-07c9-4c8f-b60f-dd86eaa87e9f failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 34s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/tb4hr42heenvq

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1030

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1030/display/redirect>

Changes:


------------------------------------------
[...truncated 63.17 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, idna, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0331120929-551edb7f_87fb6f3f-e963-40f4-92bc-606d9821af41 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 13s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/oahewxgxzwzso

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1029

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1029/display/redirect>

Changes:


------------------------------------------
[...truncated 62.75 KB...]
Already using interpreter /usr/bin/python3.7
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly


if [ "${BASH_SOURCE-}" = "$0" ]; then
    echo "You must source this script: \$ source $0" >&2
    exit 33
fi

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, urllib3, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0331060829-90dd7f8_cf3f20b8-b080-4244-a5ce-49ee45b27d84 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 10s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/tf43bfmo2znq4

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1028

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1028/display/redirect>

Changes:


------------------------------------------
[...truncated 63.17 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, certifi, urllib3, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0331002459-7bbd4e43_18ba75c6-a023-4bff-9c9d-6ab586a95c89 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 51s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/5ceaedeca5sgo

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1027

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1027/display/redirect?page=changes>

Changes:

[robinyqiu] Clean up code in ZetaSQLDialectSpecTest


------------------------------------------
[...truncated 62.73 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, idna, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8
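A quick import check, not captured in this log, confirms the editable install is visible from the new virtualenv (the expected version string is assumed from the dev version listed above):

python -c "import apache_beam; print(apache_beam.__version__)"  # expected: 2.21.0.dev0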

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0330235531-70585a19_9f05b1b6-5aab-4927-bd0f-13d4c625bbdf failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org
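To reproduce this failure outside Jenkins, the failing task can be rerun from the Beam workspace root with the flags Gradle suggests above (a sketch assuming the repository's standard ./gradlew wrapper):

./gradlew :runners:spark:job-server:testJavaJarCreatorPy37 --stacktrace --info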

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 7s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/auzontiinrbhs

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1026

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1026/display/redirect?page=changes>

Changes:

[rohde.samuel] Address leftover styling comments from PR/10892

[rohde.samuel] address comments


------------------------------------------
[...truncated 63.17 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, certifi, urllib3, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0330211530-7593d721_af159fa8-96b0-4fdd-9543-e3c524a366cb failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR
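The recorded script cleans up explicitly after capturing the test exit code. An equivalent arrangement, assuming the same ENV_DIR and OUTPUT_JAR variables, registers the cleanup once with a trap so it also runs on an unexpected early exit (illustrative only, not what the job executed):

trap 'rm -rf "$ENV_DIR"; rm -f "$OUTPUT_JAR"' EXIT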

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 18s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ecpjtoz5khjlo

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1025

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1025/display/redirect?page=changes>

Changes:

[kawaigin] [BEAM-7923] Fix datatable on notebook reloading


------------------------------------------
[...truncated 62.93 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, urllib3, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0330203403-42944212_0fb140c9-1c34-47be-8bb8-c52ecfc88c1d failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 9s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/accyaeadx2cx6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1024

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1024/display/redirect?page=changes>

Changes:

[lostluck] accept generated metrics

[lostluck] Remove mType and move type urns to urnToType

[lostluck] add missing pcollection payload


------------------------------------------
[...truncated 62.91 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0330200902-a22c7b1_8d5dd94f-d445-4d30-9e46-1a82a9c4664d failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 9s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/4clnd7pc7fk4e

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1023

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1023/display/redirect?page=changes>

Changes:

[kcweaver] [BEAM-9446] Retain unknown arguments when using uber jar job server.

[kcweaver] Enable '--option=value' and '-option' syntax.

[github] Use split instead of rsplit


------------------------------------------
[...truncated 62.93 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, chardet, idna, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 550, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0330195345-e4889756_aec8438c-6302-427c-b7f3-8f64cc52d5f7 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
>>> FAILURE
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 42s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/cm7vo5djvko5a

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1022

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1022/display/redirect>

Changes:


------------------------------------------
[...truncated 62.71 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, urllib3, certifi, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8
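A quick way to confirm the editable install above is the one the test will pick up is to print the dev version from the same virtualenv (a minimal sketch, assuming the environment is still activated):

import apache_beam
# The install log above reports apache-beam==2.21.0.dev0 installed in develop mode.
print(apache_beam.__version__)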

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0330184912-f7d0f220_3895cb60-e36d-48a3-873d-8e9a7782b684 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
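The job-server-side jar creation aborts with this IllegalArgumentException before any output jar is written, and the same exception appears in the other failed builds collected below. For reference, the submission can be reproduced outside the wrapper script roughly as follows (a sketch only; the jar path is a placeholder and SparkRunner is an assumed value of $RUNNER, neither is recorded in this log, while the flags and the container image mirror the invocation above):

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

# Placeholder -- substitute a locally built Spark job server jar.
JOB_SERVER_JAR = '/path/to/beam-runners-spark-job-server.jar'
CONTAINER_IMAGE = 'apache/beam_python3.7_sdk:2.21.0.dev'  # image from the log above

options = PipelineOptions([
    '--runner=SparkRunner',                      # assumed value of $RUNNER for this job
    '--spark_job_server_jar=' + JOB_SERVER_JAR,  # $JOB_SERVER_JAR above
    '--output_executable_path=/tmp/spark-test.jar',
    '--sdk_worker_parallelism=1',
    '--environment_type=DOCKER',
    '--environment_config=' + CONTAINER_IMAGE,
])

# Submitting any small pipeline exercises the same jar-creation path.
with beam.Pipeline(options=options) as p:
    _ = (p | beam.Create([0, 1, 2]) | beam.Map(lambda x: x + 1))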

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE
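Note that the script deliberately defers failure: the submission's exit code is captured, the virtualenv and the output jar are removed, and only then does the script exit (the jar also keeps a flink-test-* name even on the Spark path). A rough Python equivalent of that run-then-clean-up pattern, purely illustrative and with placeholder paths standing in for $ENV_DIR and $OUTPUT_JAR:

import os
import shutil
import subprocess

env_dir = '/tmp/beam-test-env'      # placeholder for $ENV_DIR
output_jar = '/tmp/spark-test.jar'  # placeholder for $OUTPUT_JAR

exit_code = 0
try:
    # Run the submission step and keep its exit code instead of raising,
    # mirroring the `|| TEST_EXIT_CODE=$?` pattern in the shell script.
    exit_code = subprocess.call(['python', '-c', 'pass'])
finally:
    # Always clean up, whether or not the submission succeeded.
    shutil.rmtree(env_dir, ignore_errors=True)
    try:
        os.remove(output_jar)
    except FileNotFoundError:
        pass

print('>>> SUCCESS' if exit_code == 0 else '>>> FAILURE')
raise SystemExit(exit_code)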

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 8s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/nuzhdcrnegpom

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1021

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1021/display/redirect?page=changes>

Changes:

[robertwb] Side input signals for legacy worker.

[github] Update documentation

[apilloud] [BEAM-9512] Map anonymous structs to schema


------------------------------------------
[...truncated 63.28 KB...]
Already using interpreter /usr/bin/python3.7
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly


if [ "${BASH_SOURCE-}" = "$0" ]; then
    echo "You must source this script: \$ source $0" >&2
    exit 33
fi

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, certifi, urllib3, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0330180543-e85f01fa_9f494142-7cb9-4103-b411-d53b59951b14 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 21s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/4lftr3yftmcxw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1020

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1020/display/redirect>

Changes:


------------------------------------------
[...truncated 63.19 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:root:Waiting for grpc channel to be ready at localhost:57711.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0330120937-1575838d_bfc6ba09-f3c9-451b-8057-49a4e48ed7d4 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
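Build #1020 additionally logs the SDK waiting for the job server's gRPC endpoint (localhost:57711) to come up before submission. That wait corresponds to a channel readiness check along these lines (a sketch, using the grpcio package from the install log; the port is taken from the warning above and any otherwise unused local port behaves the same way):

import grpc

channel = grpc.insecure_channel('localhost:57711')
try:
    # Block until the channel is ready or the timeout expires.
    grpc.channel_ready_future(channel).result(timeout=60)
    print('job server endpoint is ready')
except grpc.FutureTimeoutError:
    print('gave up waiting for the job server endpoint')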

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 19s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/brvu7k6ffmhso

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1019

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1019/display/redirect>

Changes:


------------------------------------------
[...truncated 63.15 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, chardet, urllib3, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0330060843-aa8a2992_7dd40783-6dda-4a75-8549-4caf1be7f3bd failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 25s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/mnxdybu524m6m

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1018

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1018/display/redirect>

Changes:


------------------------------------------
[...truncated 63.15 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, urllib3, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0330000901-960ffddf_2c36d1f3-f760-48f6-aaf9-34fb3845a6e1 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 43s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/qpgxqtwceujge

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1017

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1017/display/redirect>

Changes:


------------------------------------------
[...truncated 63.15 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, certifi, urllib3, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8
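
This pip step installs the SDK in editable (develop) mode, so the test imports the workspace checkout under sdks/python rather than a released wheel. A quick sanity check of that, as a minimal sketch (the printed path depends on the workspace layout):

import apache_beam

# With the editable install above, the version is the dev version from setup.py and the
# package resolves to the checkout rather than to site-packages.
print(apache_beam.__version__)  # expected: 2.21.0.dev0
print(apache_beam.__file__)     # expected: .../sdks/python/apache_beam/__init__.py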

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0329180927-7de28ffb_d4f4ba36-0e89-43ac-b25f-a5622a55a3b5 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
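
The failure happens on the job-server side while packaging the executable jar: --output_executable_path asks the Spark job server to write one self-contained jar, and the IllegalArgumentException means its jar-detection step did not see exactly one jar on the application class loader, so the java -jar step below is skipped. For reference, the same invocation expressed directly in Python instead of through the shell wrapper, as a sketch (the runner name is assumed to be SparkRunner for this job; the jar path and output name are placeholders):

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions, SetupOptions

options = PipelineOptions([
    '--runner=SparkRunner',
    '--spark_job_server_jar=/path/to/spark-job-server.jar',       # placeholder path
    '--output_executable_path=spark-test.jar',                    # placeholder name
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
])
options.view_as(SetupOptions).save_main_session = True

# Running the pipeline with these options should produce the executable jar instead of
# submitting the job; the with-block waits for that to finish and raises on failure.
with beam.Pipeline(options=options) as pipeline:
    pipeline | beam.Create([0, 1, 2]) | beam.Map(lambda x: x + 1)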

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 9s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/brc5gss3awqo2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1016

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1016/display/redirect>

Changes:


------------------------------------------
[...truncated 63.15 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev
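
The grep above only verifies that the SDK container tag already exists on the local Docker daemon; it does not rebuild anything. The same check from Python, as a small sketch (the image tag is the one printed above):

import subprocess

image = 'apache/beam_python3.7_sdk:2.21.0.dev'
tags = subprocess.run(
    ['docker', 'images', '--format', '{{.Repository}}:{{.Tag}}'],
    capture_output=True, text=True, check=True,
).stdout.splitlines()
# Mirrors: docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
print(('found ' if image in tags else 'missing ') + image)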

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, idna, urllib3, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0329120906-cd1cdd00_0cf36eac-f007-4f1d-ad1b-317ca50197e9 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE
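
The "don't fail fast here; clean up before exiting" comment in the script above is why the jar creation runs in a subshell whose exit code is captured instead of aborting the script: the temporary virtualenv and the output jar are still removed before the harness reports failure. The same pattern in Python, as a minimal sketch with placeholder commands and paths:

import os
import shutil
import subprocess
import sys

# Placeholder standing in for: python -c "$PIPELINE_PY" --runner ... (the jar-creation step).
exit_code = subprocess.call([sys.executable, '-c', 'raise SystemExit(1)'])
try:
    if exit_code == 0:
        # Only execute the produced jar when creation succeeded, as the script does.
        exit_code = subprocess.call(['java', '-jar', 'spark-test.jar'])  # placeholder name
finally:
    # Clean up even on failure, mirroring rm -rf $ENV_DIR and rm -f $OUTPUT_JAR.
    shutil.rmtree('env_dir', ignore_errors=True)  # placeholder path
    if os.path.exists('spark-test.jar'):
        os.remove('spark-test.jar')
sys.exit(exit_code)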

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 48s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/zbmhjxacnze7o

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1015

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1015/display/redirect>

Changes:


------------------------------------------
[...truncated 62.69 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, idna, certifi, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0329060917-2047049a_54d410f5-dca8-404f-9a88-ce8a3e59db69 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
>>> FAILURE
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 58s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/z5l75pd6qjyck

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1014

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1014/display/redirect?page=changes>

Changes:

[github] [BEAM-9557] Fix timer window boundary checking (#11252)


------------------------------------------
[...truncated 63.17 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, chardet, urllib3, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0329004135-6ececb98_c36f4815-579a-44fe-aff7-4452d913f12e failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 27s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ulqlkyjj3fu6a

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1013

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1013/display/redirect>

Changes:


------------------------------------------
[...truncated 63.14 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, certifi, idna, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0329000901-73d246c9_9d28b4fe-43de-41b1-acbf-663ea55ed696 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
>>> FAILURE
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 43s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/56di45vfc5sak

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1012

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1012/display/redirect>

Changes:


------------------------------------------
[...truncated 62.91 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0328180919-919bf7ae_5613c635-a615-4734-99ae-8f61b4c7739a failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 1s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/2eyryf3sy2q6s

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1011

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1011/display/redirect>

Changes:


------------------------------------------
[...truncated 62.69 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0328120853-e1e78055_8463d0c6-4610-4e94-bc10-ce5df796bb63 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 36s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/lo5xtxmxxnhzg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1010

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1010/display/redirect>

Changes:


------------------------------------------
[...truncated 62.69 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, urllib3, certifi, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0328061208-42912031_63b014a2-af9f-4f6c-bf95-51e7f8a30554 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 38s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ebnwlgfwjjuto

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1009

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1009/display/redirect>

Changes:


------------------------------------------
[...truncated 63.06 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, idna, urllib3, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0328012331-a2a44efb_9db8ae33-fa3c-4854-a0ef-b5e03896d4c1 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 11m 18s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/xes2jlpyczcs2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1008

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1008/display/redirect?page=changes>

Changes:

[github] Merge pull request #10883: [BEAM-9331] Add better Row builders

[github] [BEAM-8292] Portable Reshuffle for Go SDK (#11197)


------------------------------------------
[...truncated 63.41 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, urllib3, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0328000143-7141dafd_932546e4-5a93-4b5a-a819-a5bad7aeeca8 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 12s
64 actionable tasks: 50 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/hm32bcpokzsg2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1007

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1007/display/redirect?page=changes>

Changes:

[lcwik] [BEAM-4374] Update protos related to MonitoringInfo.


------------------------------------------
[...truncated 62.90 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, idna, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0327214414-d47dcd58_b749b3c2-b394-4d88-bbe5-d5f41d5e5388 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 55s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/pocns5ntpnlq6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1006

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1006/display/redirect?page=changes>

Changes:

[robertwb] [BEAM-4150] Use explicit map for data channel coders.

[robertwb] [BEAM-4150] Don't window PCollection coders.

[github] Update the Go SDK roadmap for April 2020 (#11239)


------------------------------------------
[...truncated 63.17 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Using cached pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, chardet, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0327201517-92f99cdf_d87d547f-d997-4a8b-bfee-09768f5c3b87 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
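
The IllegalArgumentException above suggests the job server did not see exactly one jar on its application classloader while assembling the executable jar. As a purely illustrative pre-flight check, one could verify the jar path before submitting; JOB_SERVER_JAR is the shell variable used by this script, and whether it is exported to the environment is an assumption:

# Illustrative sanity check: confirm the configured job-server jar is a single,
# existing .jar file before handing it to --spark_job_server_jar.
import os
import sys

job_server_jar = os.environ.get("JOB_SERVER_JAR", "")
if not (job_server_jar.endswith(".jar") and os.path.isfile(job_server_jar)):
    sys.exit("JOB_SERVER_JAR does not point at a single jar file: %r" % job_server_jar)
print("Job server jar looks usable:", job_server_jar)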

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

>>> FAILURE
if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 30s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/myshkogoijeoc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1005

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1005/display/redirect>

Changes:


------------------------------------------
[...truncated 63.43 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Downloading pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, urllib3, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0327182506-9b2869e_6ca328a3-e02a-4e3f-816b-9b5e91127b45 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE
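
The "|| TEST_EXIT_CODE=$?" / cleanup / "exit $TEST_EXIT_CODE" sequence above deliberately avoids failing fast so that the virtualenv and output jar are always removed before the script exits. A rough Python analogue of that pattern, using placeholder paths rather than the job's real ENV_DIR and OUTPUT_JAR, is sketched here:

# Rough analogue of the shell pattern above: remember the failure, always clean up,
# then exit with the recorded status. Paths are placeholders, not the job's values.
import shutil
import subprocess
import sys
from pathlib import Path

exit_code = 0
try:
    exit_code = subprocess.call([sys.executable, "-c", "print('pipeline placeholder')"])
finally:
    shutil.rmtree("gradleenv-placeholder", ignore_errors=True)  # rm -rf $ENV_DIR
    jar = Path("spark-test-placeholder.jar")                    # rm -f $OUTPUT_JAR
    if jar.exists():
        jar.unlink()
sys.exit(exit_code)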

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 13m 52s
64 actionable tasks: 50 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/vms3ldld3qmpe

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1004

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1004/display/redirect?page=changes>

Changes:

[github] Update the range for pyarrow


------------------------------------------
[...truncated 63.09 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.17.0,>=0.15.1
  Downloading pyarrow-0.16.0-cp37-cp37m-manylinux2014_x86_64.whl (63.1 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, certifi, chardet, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.16.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0327171228-87d6215a_997265a7-b4fc-485f-bc4d-4525e9614de4 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
>>> FAILURE
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
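
For reference, the jar-creation flags passed above map onto PipelineOptions roughly as sketched below. $RUNNER and $JOB_SERVER_JAR are resolved by Gradle in this job, so the SparkRunner name and the jar path in the sketch are assumptions; the remaining values are copied from the log.

# Sketch: the jar-creation flags from the script expressed as PipelineOptions.
# Runner name and job-server jar path are assumptions; adjust for a real run.
from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    '--runner=SparkRunner',                                   # $RUNNER (assumed)
    '--spark_job_server_jar=/path/to/spark-job-server.jar',   # $JOB_SERVER_JAR (placeholder)
    '--output_executable_path=spark-test.jar',                # $OUTPUT_JAR
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
    '--sdk_worker_parallelism=1',
])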

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 15s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/nxwl3xnhfjcuy

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1003

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1003/display/redirect?page=changes>

Changes:

[alex] [BEAM-9605] BIP-1: Rename setRowOption to setOption on Option builder


------------------------------------------
[...truncated 63.16 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, chardet, idna, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:root:Waiting for grpc channel to be ready at localhost:54013.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0327130147-12297ead_2f3b9e11-b239-494d-a5bb-0b9b1ea8dae3 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
>>> FAILURE
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
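
Had jar creation succeeded, the "Execute the jar" step above would simply have run the generated fat jar. A small illustrative sketch of that follow-up, with a placeholder jar name and java assumed to be on PATH, is:

# Sketch of the "Execute the jar" step: run the generated fat jar with java -jar
# and propagate its exit status. The jar name is a placeholder.
import subprocess
import sys

result = subprocess.run(["java", "-jar", "spark-test-placeholder.jar"])
sys.exit(result.returncode)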

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 15s
64 actionable tasks: 50 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/d67fs6rjmcgze

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1002

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1002/display/redirect>

Changes:


------------------------------------------
[...truncated 62.66 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev
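
The container check above greps the local Docker image list for the SDK image tag. For anyone reproducing the job locally, the same check from Python (image tag copied from the output above; a working docker CLI is assumed) might look like:

# Sketch: verify the locally built SDK container exists, mirroring the grep above.
import subprocess

image = "apache/beam_python3.7_sdk:2.21.0.dev"
listing = subprocess.run(
    ["docker", "images", "--format", "{{.Repository}}:{{.Tag}}"],
    capture_output=True, text=True, check=True,
).stdout.splitlines()
print(("found " if image in listing else "missing ") + image)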

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, chardet, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0327120921-3df61d5d_0a73ef68-dea8-44f6-a583-64b43456c4ac failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
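The traceback above comes from the portable runner's wait_until_finish(), which raises RuntimeError once the submitted job reaches the FAILED terminal state. A minimal sketch (a hypothetical helper, not part of the Jenkins test script) of catching that failure so a caller can log it instead of crashing:

import logging

def run_and_report(pipeline):
    # Run a Beam pipeline; return True on success, False if the job ends in a
    # failed state. On the portable runner, wait_until_finish() raises
    # RuntimeError carrying the job id, terminal state, and last error message
    # (here, the job server's IllegalArgumentException).
    result = pipeline.run()
    try:
        result.wait_until_finish()
    except RuntimeError as exc:
        logging.error("Portable pipeline failed: %s", exc)
        return False
    return True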

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE
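For comparison, a minimal local sanity check of the same PIPELINE_PY logic, run on the in-process DirectRunner instead of through the Spark job-server jar (a sketch assuming a local Beam install; it exercises only the save_main_session/global_var behaviour, not the jar-creation path that fails above):

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions, SetupOptions
from apache_beam.testing.util import assert_that, equal_to

global_var = 1  # same module-level variable the portable test relies on

options = PipelineOptions(["--runner=DirectRunner"])
options.view_as(SetupOptions).save_main_session = True

# Exiting the with-block runs the pipeline and waits for it to finish.
with beam.Pipeline(options=options) as p:
    pcoll = p | beam.Create([0, 1, 2]) | beam.Map(lambda x: x + global_var)
    assert_that(pcoll, equal_to([1, 2, 3]))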

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 2s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/x43u3xf3zbvo6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1001

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1001/display/redirect?page=changes>

Changes:

[alex] [BEAM-9605] BIP-1: Rename setRowOption to setOption on Option builder


------------------------------------------
[...truncated 65.44 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, urllib3, certifi, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0327095116-96aa6398_a2f11b30-4f76-4604-8d3e-b0c17a5aed48 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
>>> FAILURE
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 55s
64 actionable tasks: 59 executed, 4 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/xvgdqo7vpd6dk

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1000

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1000/display/redirect>

Changes:


------------------------------------------
[...truncated 63.21 KB...]
Already using interpreter /usr/bin/python3.7
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly


if [ "${BASH_SOURCE-}" = "$0" ]; then
    echo "You must source this script: \$ source $0" >&2
    exit 33
fi

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, chardet, urllib3, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0327062518-c4543c02_6d897ba7-8379-416a-8df1-4e3c2b32b940 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 4s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/4y3ol73th7lgm

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #999

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/999/display/redirect?page=changes>

Changes:

[boyuanz] Remove TimeSpec from proto


------------------------------------------
[...truncated 63.09 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, chardet, certifi, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0327043548-3fabc233_17801dc9-7430-4c89-8db7-ad1aad6db54d failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
>>> FAILURE
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 8s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/zbbw627ll4xuc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #998

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/998/display/redirect?page=changes>

Changes:

[github] [BEAM-9574] Ensure that instances of generated namedtuple classes can be


------------------------------------------
[...truncated 62.79 KB...]
Already using interpreter /usr/bin/python3.7
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly


if [ "${BASH_SOURCE-}" = "$0" ]; then
    echo "You must source this script: \$ source $0" >&2
    exit 33
fi

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, chardet, idna, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0327013121-53c0a6dd_ad429107-52ac-4164-a308-43f644a65304 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 56s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/22epgnooipicw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #997

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/997/display/redirect>

Changes:


------------------------------------------
[...truncated 62.86 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, certifi, idna, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0327001746-29a123b6_9f89b757-26be-4388-9817-ec490b14ecfd failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
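
The traceback above is how a FAILED portable job surfaces on the Python side: wait_until_finish raises RuntimeError carrying the job id and the job server's last error message, and the shell harness only records the resulting non-zero exit code. A rough sketch of handling that state explicitly in client code, using a stand-in pipeline rather than the test pipeline itself, might be:

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

# Stand-in pipeline; the real test submits PIPELINE_PY through the job server.
pipeline = beam.Pipeline(options=PipelineOptions())
_ = pipeline | beam.Create([1, 2, 3])

result = pipeline.run()
try:
    result.wait_until_finish()
except RuntimeError as exc:
    # The portable runner raises RuntimeError when the job ends in FAILED;
    # the message carries the job id and the job server's last error.
    print('Terminal state: %s (%s)' % (result.state, exc))
    raise SystemExit(1)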

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 11s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/obm53zdrn6fn4

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #996

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/996/display/redirect?page=changes>

Changes:

[apilloud] [BEAM-9609] Upgrade to ZetaSQL 2020.03.2


------------------------------------------
[...truncated 62.76 KB...]
Already using interpreter /usr/bin/python3.7
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly


if [ "${BASH_SOURCE-}" = "$0" ]; then
    echo "You must source this script: \$ source $0" >&2
    exit 33
fi

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, urllib3, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0326215034-c947a313_9d5ae19d-1062-427d-a2e1-d70d6ddb0be7 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE
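
Had the jar actually been produced, the remaining steps would have been to execute it and then remove the virtualenv and the jar whatever the outcome. A rough Python equivalent of that shell sequence, with placeholder paths rather than the values from this build, might be:

import os
import shutil
import subprocess
import sys

# Placeholders standing in for the harness variables $OUTPUT_JAR and $ENV_DIR.
output_jar = 'flink-test-YYYYmmdd-HHMMSS.jar'
env_dir = 'build/gradleenv/example'

# Execute the self-contained pipeline jar; do not fail fast, so that the
# cleanup below always runs (the shell version uses "|| TEST_EXIT_CODE=$?").
exit_code = subprocess.call(['java', '-jar', output_jar])

# Mirror "rm -rf $ENV_DIR" and "rm -f $OUTPUT_JAR".
shutil.rmtree(env_dir, ignore_errors=True)
if os.path.exists(output_jar):
    os.remove(output_jar)

print('>>> SUCCESS' if exit_code == 0 else '>>> FAILURE')
sys.exit(exit_code)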

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 4s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/uazslrdjqsd7u

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #995

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/995/display/redirect?page=changes>

Changes:

[ehudm] [BEAM-8078] Disable test_streaming_wordcount_debugging_it


------------------------------------------
[...truncated 62.66 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0326204701-73303c58_c2e553e2-a0a3-4480-9d35-6a2d06be3b31 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 47s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/jvn4mrzhw65oo

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #994

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/994/display/redirect?page=changes>

Changes:

[robertwb] Add base SDK version to environment capabilities for Python and Java.

[robertwb] [BEAM-9614] Add SDK id for go.

[github] [BEAM-9495] Make DataCatalogTableProvider AutoCloseable (#11116)


------------------------------------------
Started by GitHub push by robertwb
Started by GitHub push by robertwb
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-14 (beam) in workspace <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/>
No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src> # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 0ea3ec261a4d2813e763058861a6ea723fd7f533 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 0ea3ec261a4d2813e763058861a6ea723fd7f533
Commit message: "[BEAM-9495] Make DataCatalogTableProvider AutoCloseable (#11116)"
 > git rev-list --no-walk 589a3037f08920601dc53a7ced9e175142f03b5c # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/gradlew> --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g -Dorg.gradle.jvmargs=-Xmx4g :runners:spark:job-server:testPipelineJar
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.

> Configure project :sdks:python:container
Found go 1.12 in /usr/bin/go, use it.

FAILURE: Build failed with an exception.

* What went wrong:
Could not determine the dependencies of task ':runners:spark:job-server:shadowJar'.
> Could not resolve all dependencies for configuration ':runners:spark:job-server:runtimeClasspath'.
   > Could not resolve net.minidev:json-smart:[1.3.1,2.3].
     Required by:
         project :runners:spark:job-server > project :runners:spark > org.apache.hadoop:hadoop-common:2.8.5 > org.apache.hadoop:hadoop-auth:2.8.5 > com.nimbusds:nimbus-jose-jwt:4.41.1
      > Could not resolve net.minidev:json-smart:2.3-SNAPSHOT.
         > Unable to load Maven meta-data from https://oss.sonatype.org/content/repositories/staging/net/minidev/json-smart/2.3-SNAPSHOT/maven-metadata.xml.
            > Could not get resource 'https://oss.sonatype.org/content/repositories/staging/net/minidev/json-smart/2.3-SNAPSHOT/maven-metadata.xml'.
               > Could not GET 'https://oss.sonatype.org/content/repositories/staging/net/minidev/json-smart/2.3-SNAPSHOT/maven-metadata.xml'. Received status code 502 from server: Bad Gateway

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 21s

Publishing build scan...
https://gradle.com/s/utykriz4qdf66
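
Unlike the runs above, this build never reached the pipeline test: Gradle could not resolve the open-ended net.minidev:json-smart:[1.3.1,2.3] range because the Sonatype staging repository answered 502 Bad Gateway for the 2.3-SNAPSHOT metadata. A quick, informal way to check from the build machine whether that metadata is being served again, assuming the requests package is available, might be:

import requests

# URL copied verbatim from the Gradle error above; 200 means the staging
# repository is serving the metadata again, a 5xx means the outage persists.
url = ('https://oss.sonatype.org/content/repositories/staging/'
       'net/minidev/json-smart/2.3-SNAPSHOT/maven-metadata.xml')
resp = requests.get(url, timeout=30)
print(resp.status_code)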

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #993

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/993/display/redirect?page=changes>

Changes:

[robertwb] [BEAM-9340] Validate pipeline requirements in PipelineValidator.

[mxm] [BEAM-9566] Mitigate performance issue for output timestamp watermark


------------------------------------------
Started by GitHub push by lukecwik
Started by GitHub push by lukecwik
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-2 (beam) in workspace <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/>
No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src> # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 589a3037f08920601dc53a7ced9e175142f03b5c (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 589a3037f08920601dc53a7ced9e175142f03b5c
Commit message: "Merge pull request #11237: [BEAM-9566] Mitigate performance issue for output timestamp watermark holds"
 > git rev-list --no-walk 789d2ee3791f711d8f2b681ed85d261671d9476f # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/gradlew> --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g -Dorg.gradle.jvmargs=-Xmx4g :runners:spark:job-server:testPipelineJar
Starting a Gradle Daemon (subsequent builds will be faster)
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.

> Configure project :sdks:python:container
Found go 1.12 in /usr/bin/go, use it.

FAILURE: Build failed with an exception.

* What went wrong:
Could not determine the dependencies of task ':runners:spark:job-server:shadowJar'.
> Could not resolve all dependencies for configuration ':runners:spark:job-server:runtimeClasspath'.
   > Could not resolve net.minidev:json-smart:[1.3.1,2.3].
     Required by:
         project :runners:spark:job-server > project :runners:spark > org.apache.hadoop:hadoop-common:2.8.5 > org.apache.hadoop:hadoop-auth:2.8.5 > com.nimbusds:nimbus-jose-jwt:4.41.1
      > Could not resolve net.minidev:json-smart:2.3-SNAPSHOT.
         > Unable to load Maven meta-data from https://oss.sonatype.org/content/repositories/staging/net/minidev/json-smart/2.3-SNAPSHOT/maven-metadata.xml.
            > Could not get resource 'https://oss.sonatype.org/content/repositories/staging/net/minidev/json-smart/2.3-SNAPSHOT/maven-metadata.xml'.
               > Could not GET 'https://oss.sonatype.org/content/repositories/staging/net/minidev/json-smart/2.3-SNAPSHOT/maven-metadata.xml'. Received status code 502 from server: Bad Gateway

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 21s

Publishing build scan...
https://gradle.com/s/f5zikiydiemni

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #992

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/992/display/redirect>

Changes:


------------------------------------------
[...truncated 62.60 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, urllib3, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0326120921-4c5334d7_24af03df-fe22-4ed5-9fa4-abd40d3c16b9 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 3s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/mckj5yl53z3ge

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #991

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/991/display/redirect?page=changes>

Changes:

[github] [BEAM-7505] Add side input load test to Python SDK  (#11136)


------------------------------------------
[...truncated 62.86 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0326104350-176eb6ab_b1968aae-08c7-4de4-af0f-f5db06be77a6 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
>>> FAILURE
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
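
The RuntimeError above only wraps the job server's own check; the underlying java.lang.IllegalArgumentException is thrown because the application class loader does not carry exactly one jar. As a rough, hypothetical sketch (the class and method names below are invented and this is not Apache Beam's actual implementation), a check of the following shape produces the same message whenever the hosting JVM was started with a multi-entry classpath instead of a single jar:

import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;

// Hypothetical sketch only -- not Apache Beam's code. It illustrates the kind
// of class-loader inspection that fails with
// "Expected exactly one jar on sun.misc.Launcher$AppClassLoader@...".
public class SingleJarCheck {

  // Returns the single jar URL on the given class loader, or throws if the
  // loader is not URL-based or carries a number of jars other than one.
  static URL findSingleJar(ClassLoader loader) {
    if (!(loader instanceof URLClassLoader)) {
      throw new IllegalArgumentException("Unsupported class loader: " + loader);
    }
    List<URL> jars = new ArrayList<>();
    for (URL url : ((URLClassLoader) loader).getURLs()) {
      if (url.getPath().endsWith(".jar")) {
        jars.add(url);
      }
    }
    if (jars.size() != 1) {
      // A JVM launched with several classpath entries (for example a Gradle
      // test worker) or with an exploded classes directory ends up here.
      throw new IllegalArgumentException("Expected exactly one jar on " + loader);
    }
    return jars.get(0);
  }

  public static void main(String[] args) {
    // On Java 8 the system class loader is sun.misc.Launcher$AppClassLoader,
    // a URLClassLoader, so this inspection works; on Java 9+ the application
    // class loader is no longer URL-based and the first check throws instead.
    System.out.println(findSingleJar(ClassLoader.getSystemClassLoader()));
  }
}

Under these assumptions, running the class on Java 8 from an exploded classpath (java -cp out SingleJarCheck) reproduces the exception text, while running it from a single jar prints that jar's URL.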

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 29s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/egotruhnzk4ee

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #990

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/990/display/redirect?page=changes>

Changes:

[github] Add notes to change log.


------------------------------------------
[...truncated 62.58 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, chardet, certifi, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0326074854-e5b0035c_ec247ebc-23f1-4add-83cf-60a8ee7bf2e7 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
>>> FAILURE
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 15s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/3czxafehjadgu

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #989

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/989/display/redirect?page=changes>

Changes:

[github] [BEAM-9371] Add SideInputLoadTest to Java SDK (#10949)


------------------------------------------
[...truncated 62.85 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, certifi, chardet, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0326072045-cf1d4b9f_dec61ba4-4c68-42fa-a60a-7ff84d14363a failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
>>> FAILURE
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 59s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/r3bqsrki2tnbc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #988

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/988/display/redirect>

Changes:


------------------------------------------
[...truncated 63.32 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, idna, certifi, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0326060935-be3aa421_acb3a19f-8af1-4c72-a24b-873ad39283d4 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 15s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/7azzruielx4m4

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #987

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/987/display/redirect?page=changes>

Changes:

[ehudm] [BEAM-8078] Disable test_streaming_wordcount_it


------------------------------------------
[...truncated 62.55 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, certifi, idna, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0326015906-f2c3841f_6cf80e2a-f669-4d88-af35-9e662fa48811 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
>>> FAILURE
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 9s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/xr4lhti6aqrfw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #986

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/986/display/redirect?page=changes>

Changes:

[github] add @RequiresTimeSortedInput to CHANGES.md (#11228)


------------------------------------------
[...truncated 63.09 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, certifi, urllib3, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0326001926-eaba95d9_9163ee36-87b8-4d69-8226-3c186f66f9d0 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 22s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ruutkzgmc3n6i

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org
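
The recurring failure in these builds ends in the same exception each time: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader. That check appears to require the job server to have been started from a single self-contained jar, so that the JVM's application classloader has exactly one classpath entry. A minimal, hypothetical bash sketch of the distinction (the jar names and main class are placeholders, not taken from this log):

# Started from one uber jar: the application classloader sees a single entry,
# and a check like the one above can locate "exactly one jar".
java -jar beam-job-server.jar

# Started from an exploded classpath: the classloader sees two entries, which is
# the situation the "Expected exactly one jar" check rejects.
java -cp 'beam-job-server.jar:extra-dependency.jar' example.Main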


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #985

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/985/display/redirect?page=changes>

Changes:

[daniel.o.programmer] [BEAM-3301] Fix another bug in DoFn validation, in exec.


------------------------------------------
[...truncated 63.02 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, urllib3, chardet, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0325233858-d2c40a52_d9973b48-10f8-4a30-b0f9-ad4901c9c98a failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 39s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/vzmt3joaw3orq

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org
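
These test scripts all use the same idiom around TEST_EXIT_CODE: the pipeline invocation's exit status is captured with "|| TEST_EXIT_CODE=$?" instead of aborting, cleanup always runs, and the captured status is re-raised at the end so Gradle still sees the failure. A stripped-down sketch of the pattern (the command and directory names are placeholders):

TEST_EXIT_CODE=0
# Capture a failure instead of exiting immediately, so the cleanup below always runs.
(run_pipeline_under_test) || TEST_EXIT_CODE=$?
# Cleanup happens whether or not the test failed.
rm -rf "$SCRATCH_DIR"
# Re-raise the original result for the caller.
exit $TEST_EXIT_CODE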


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #984

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/984/display/redirect?page=changes>

Changes:

[github] Update Go Protos (#11230)


------------------------------------------
[...truncated 62.60 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, chardet, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0325231845-b1c17667_c5759e8a-eea4-40f0-8e37-1ed65e48b1c4 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 30s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/7z34bslvm2k5w

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #983

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/983/display/redirect?page=changes>

Changes:

[rohde.samuel] [BEAM-9601] Skip the streaming wordcount test because it uses a


------------------------------------------
[...truncated 62.91 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, urllib3, idna, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0325223350-4b982f3a_3d47d95c-9f9f-48c7-b3ef-0ea38ac5714a failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 10s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/7le76v2i7ynak

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #982

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/982/display/redirect?page=changes>

Changes:

[pabloem] Starting refactor of FnApiRunner

[pabloem] fixup

[pabloem] Fix lint issues

[pabloem] Creating file with worker handlers

[pabloem] Fixup

[pabloem] Fixing lint. Formatting

[pabloem] Fixup

[pabloem] Renaming method to be more appropriate

[github] fix formatter


------------------------------------------
[...truncated 63.17 KB...]
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, urllib3, idna, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Waiting for grpc channel to be ready at localhost:45683.
WARNING:root:Waiting for grpc channel to be ready at localhost:45683.
WARNING:root:Waiting for grpc channel to be ready at localhost:45683.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0325211243-7c7513f_0700ee3d-c9c2-49a5-a786-c4ba35c49b20 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 6s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/vntq7h4q4tlco

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #981

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/981/display/redirect?page=changes>

Changes:

[github] [BEAM-8078] streaming_wordcount_debugging.py is missing a test (#10914)


------------------------------------------
Started by GitHub push by udim
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-9 (beam) in workspace <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/>
No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src> # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 3b0f2828d2f7b418915089da5b3e7cb3acfcd104 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 3b0f2828d2f7b418915089da5b3e7cb3acfcd104
Commit message: "[BEAM-8078] streaming_wordcount_debugging.py is missing a test (#10914)"
 > git rev-list --no-walk 8b4f023a5cf66affdfd2d275eb5da117ee29ed85 # timeout=10
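
For reference, the checkout steps above condense into a short local reproduction (a sketch; it assumes git, Docker, and a JDK 8 toolchain comparable to the apache-beam-jenkins workers):

# Sketch: rebuild the workspace state of build #981 and run the same task.
git clone https://github.com/apache/beam.git beam && cd beam
git checkout 3b0f2828d2f7b418915089da5b3e7cb3acfcd104   # revision checked out above
./gradlew :runners:spark:job-server:testPipelineJar --stacktrace
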
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/gradlew> --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g -Dorg.gradle.jvmargs=-Xmx4g :runners:spark:job-server:testPipelineJar
Starting a Gradle Daemon, 1 busy Daemon could not be reused, use --status for details
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.

FAILURE: Build failed with an exception.

* What went wrong:
Could not determine the dependencies of task ':runners:spark:job-server:shadowJar'.
> Could not resolve all dependencies for configuration ':runners:spark:job-server:runtimeClasspath'.
   > Could not resolve net.minidev:json-smart:[1.3.1,2.3].
     Required by:
         project :runners:spark:job-server > project :runners:spark > org.apache.hadoop:hadoop-common:2.8.5 > org.apache.hadoop:hadoop-auth:2.8.5 > com.nimbusds:nimbus-jose-jwt:4.41.1
      > Could not resolve net.minidev:json-smart:2.3-SNAPSHOT.
         > Unable to load Maven meta-data from https://oss.sonatype.org/content/repositories/staging/net/minidev/json-smart/2.3-SNAPSHOT/maven-metadata.xml.
            > Could not get resource 'https://oss.sonatype.org/content/repositories/staging/net/minidev/json-smart/2.3-SNAPSHOT/maven-metadata.xml'.
               > Could not GET 'https://oss.sonatype.org/content/repositories/staging/net/minidev/json-smart/2.3-SNAPSHOT/maven-metadata.xml'.
                  > Read timed out
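
This run never reached the portable-jar test itself: the dynamic range net.minidev:json-smart:[1.3.1,2.3], pulled in through hadoop-auth and nimbus-jose-jwt, lets Gradle consider the 2.3-SNAPSHOT metadata on the Sonatype staging repository, and that metadata request timed out. A sketch, using only stock Gradle tasks and flags, of how the chain can be confirmed and the resolution retried:

# Sketch: show how json-smart enters the runtime classpath, then retry with
# fresh dependency metadata instead of the timed-out lookup.
./gradlew :runners:spark:job-server:dependencies --configuration runtimeClasspath | grep -B2 -A2 json-smart
./gradlew :runners:spark:job-server:shadowJar --refresh-dependencies --stacktrace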

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 2m 21s

Publishing build scan...
https://gradle.com/s/oaewp3xop3roc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #980

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/980/display/redirect>

Changes:


------------------------------------------
[...truncated 62.85 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.3)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, certifi, urllib3, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0325185324-fb8ca48e_374ad27e-cb6e-4696-bfbd-ba5cc0a81899 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE
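
The clean-up-and-exit tail above repeats in every failing run in this job's recent builds; the quickest way to iterate locally is to run only the failing Gradle task (a sketch; assumes a beam checkout and Docker available for the apache/beam_python3.7_sdk container):

# Sketch: rerun just the failing post-commit test with verbose Gradle output.
./gradlew :runners:spark:job-server:testJavaJarCreatorPy37 --stacktrace --info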

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 37s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/apdyl4bpe5acs

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #979

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/979/display/redirect?page=changes>

Changes:

[ehudm] [BEAM-9398] runtime_type_check: support setup

[robertwb] [BEAM-9340] Plumb requirements through Java SDK.

[robertwb] [BEAM-9340] Populate requirements for Java DoFn properties.

[ehudm] [BEAM-8280] Type hints via annotations snippets

[piotr.szuberski] [BEAM-9606] Add missing parameters in combine_test.py gradle example


------------------------------------------
[...truncated 62.96 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.2)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, urllib3, idna, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0325172341-9a85b9b6_60ed9baa-53c2-40ab-bba6-df0dcd9ed70a failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 1s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/6yu44lodncz6o

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #978

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/978/display/redirect?page=changes>

Changes:

[github] Go changes for model updates. (#11211)


------------------------------------------
[...truncated 62.92 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.2)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, urllib3, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
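# Sketch (not part of the Jenkins script): the output jar is named
# "flink-test-*" even though this job runs the Spark runner; a runner-aware
# name would be a one-line change, e.g.
#   OUTPUT_JAR=${RUNNER,,}-test-$(date +%Y%m%d-%H%M%S).jar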
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0325162209-5f7c8022_33d97d5f-fb9c-45f1-babf-12a317d13514 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 21s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/cidthmj6tp6hk

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #977

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/977/display/redirect?page=changes>

Changes:

[jozo.vilcek] [BEAM-9420] Configurable timeout for blocking kafka API call(s)


------------------------------------------
[...truncated 63.12 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, chardet, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0325135115-eb00e1ce_d98b84e4-42aa-487f-ada8-a08003d1aae0 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 49s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/zyhymb2xbzosm

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #976

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/976/display/redirect>

Changes:


------------------------------------------
[...truncated 62.96 KB...]
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Already using interpreter /usr/bin/python3.7
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly


if [ "${BASH_SOURCE-}" = "$0" ]; then
    echo "You must source this script: \$ source $0" >&2
    exit 33
fi

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, urllib3, idna, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0325120853-e91ef0e8_4d37784b-0469-4549-ac60-279a0897f5be failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
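The IllegalArgumentException above is raised on the Java side, in the job-server process handling the jar-creation request: judging by the message, it expects its application classloader to contain exactly one jar (the job-server fat jar) and finds something else. The Python driver only sees it as the RuntimeError re-raised by wait_until_finish(). A minimal sketch of how that surfaces to a caller (illustrative; pipeline is the PIPELINE_PY pipeline defined above):

try:
    result = pipeline.run()
    result.wait_until_finish()
except RuntimeError as err:
    # The portable runner embeds the job server's error text in the message,
    # so the Java-side failure can be matched from Python.
    if 'Expected exactly one jar' in str(err):
        print('Jar creation failed inside the Spark job server:', err)
    raise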

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE
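For local triage, the jar-creation step can be driven directly from Python rather than through the Gradle task. A minimal sketch, assuming $RUNNER resolves to SparkRunner here (as the spark_job_server_jar branch implies) and using placeholder paths rather than values from this build:

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    '--runner=SparkRunner',
    # Placeholder paths; substitute a locally built job-server jar and an
    # output location of your choice.
    '--spark_job_server_jar=/path/to/beam-runners-spark-job-server.jar',
    '--output_executable_path=/tmp/spark-test.jar',
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
])

with beam.Pipeline(options=options) as p:
    _ = p | beam.Create([0, 1, 2]) | beam.Map(lambda x: x + 1)

Because --output_executable_path is set, run() asks the job server to write an executable jar instead of submitting the pipeline; the failure in this log happens during that step, before java -jar is ever reached.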

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 36s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/g4vjixvhvpsq6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #975

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/975/display/redirect?page=changes>

Changes:

[mxm] [BEAM-9573] Correct computing of watermark hold for timer output

[mxm] [BEAM-9580] Downgrade Flink version to 1.9 for Nexmark and


------------------------------------------
[...truncated 62.66 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0325083419-9341b288_527153ea-78a8-4152-aeb5-8c2d59adb63a failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 14s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/c546jj6lg24xw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #974

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/974/display/redirect>

Changes:


------------------------------------------
[...truncated 63.07 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, certifi, idna, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0325060816-1b689f5_91c042d4-0b0e-4a89-b3e3-1ba6c8260882 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 58s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/vg6z4hdyt3dve

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #973

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/973/display/redirect?page=changes>

Changes:

[github] Merge pull request #11215 from [BEAM-9601] unbreaking precommits


------------------------------------------
[...truncated 62.74 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, chardet, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0325045352-4f9f4ab5_695d307b-f2a3-4b3e-9db1-d775e43f5a58 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 25s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/vibfromwk5hsq

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #972

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/972/display/redirect?page=changes>

Changes:

[rohde.samuel] Adds a streaming wordcount integration test

[pabloem] Fixing bqtest

[rohde.samuel] changed data to be less repetitive


------------------------------------------
[...truncated 63.24 KB...]
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, certifi, idna, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Waiting for grpc channel to be ready at localhost:41395.
WARNING:root:Waiting for grpc channel to be ready at localhost:41395.
WARNING:root:Waiting for grpc channel to be ready at localhost:41395.
WARNING:root:Waiting for grpc channel to be ready at localhost:41395.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0325005541-2e464023_57608da4-4006-4f93-ade0-59149693fc63 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 0s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/hq6djehcoozlg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #971

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/971/display/redirect?page=changes>

Changes:

[daniel.o.programmer] [BEAM-3301] Adding restriction trackers and validation.


------------------------------------------
[...truncated 62.88 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, certifi, idna, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0325000500-70a3d736_adb8c158-5c5b-4e1c-9b6c-240c415dba20 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
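
The RuntimeError above is raised by wait_until_finish once the job reaches a terminal FAILED state. A small sketch of how a wrapper could catch it and still surface the underlying message (illustrative only; run_and_report and its behaviour are assumptions, not part of the CI script):

import sys
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

# Illustrative sketch only: wait_until_finish raises RuntimeError for terminal
# failure states, so a wrapper can log the message and return a non-zero code.
def run_and_report(pipeline):
    try:
        pipeline.run().wait_until_finish()
        return 0
    except RuntimeError as exc:
        print('>>> pipeline failed: %s' % exc, file=sys.stderr)
        return 1

if __name__ == '__main__':
    p = beam.Pipeline(options=PipelineOptions())
    _ = p | beam.Create([1, 2, 3]) | beam.Map(lambda x: x * 2)
    sys.exit(run_and_report(p))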

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 55s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/egggdc7rasikq

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #970

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/970/display/redirect?page=changes>

Changes:

[github] Merge pull request #11163 from [BEAM-9548] Add better error handling to


------------------------------------------
[...truncated 62.90 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, urllib3, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0324211805-ffbc45a6_bbfc3309-e42b-424e-9f5c-a7d949dafa26 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 10s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/p5yadnuka4kpo

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #969

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/969/display/redirect?page=changes>

Changes:

[github] [BEAM-9579] Fix numpy logic operators (#11204)


------------------------------------------
[...truncated 63.30 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, chardet, urllib3, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0324203150-f8b9f30_5cdf36ef-abb9-43a5-b7b4-6782cfaeee46 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
>>> FAILURE
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 41s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/kczjao4bw2b3i

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #968

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/968/display/redirect>

Changes:


------------------------------------------
[...truncated 63.07 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, chardet, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0324183447-625b3e25_1a107491-1a73-4e17-9063-5eb8de9f4819 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 22s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ujx62qvpu7dxy

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #967

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/967/display/redirect?page=changes>

Changes:

[github] [BEAM-7923] Pop failed transform when error is raised (#11174)


------------------------------------------
[...truncated 63.09 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, urllib3, idna, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0324175742-19d368a2_d4999155-893f-4b10-8170-c9972ea134aa failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 57s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/hwfdznt4q46b4

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #966

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/966/display/redirect?page=changes>

Changes:

[piotr.szuberski] [BEAM-9507] Fix python dependency check task


------------------------------------------
[...truncated 63.11 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, urllib3, chardet, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0324145302-bbfb3df8_9092d340-61ec-46b9-9eaf-4833cdeee660 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
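
(Aside: the IllegalArgumentException above is raised on the Java side while scanning the job server's classloader for a jar. The sketch below is a hypothetical local pre-flight check only, not the check Beam performs; it simply verifies that the path handed to --spark_job_server_jar is one readable jar file.)

import os
import sys
import zipfile

def check_job_server_jar(path):
    # Hypothetical sanity check on the jar path passed via $JOB_SERVER_JAR.
    if not os.path.isfile(path):
        sys.exit("job server jar not found: %s" % path)
    if not zipfile.is_zipfile(path):
        sys.exit("not a valid jar/zip file: %s" % path)
    print("job server jar looks OK: %s" % path)

if __name__ == '__main__':
    check_job_server_jar(os.environ.get('JOB_SERVER_JAR', ''))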

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi
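
(Aside: when the run gets far enough to produce $OUTPUT_JAR, a quick way to confirm that the pipeline proto and staged artifacts were packaged is to list the jar's entries. A sketch under that assumption; the exact entry names vary by Beam version.)

import sys
import zipfile

def list_jar(path, limit=20):
    # Print the first few entries of the generated executable jar.
    with zipfile.ZipFile(path) as jar:
        for name in jar.namelist()[:limit]:
            print(name)

if __name__ == '__main__':
    list_jar(sys.argv[1])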

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

>>> FAILURE
if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 16s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/vvthgym4zxob2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #965

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/965/display/redirect>

Changes:


------------------------------------------
[...truncated 62.86 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, idna, certifi, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0324120925-a46e6188_a6ef9dc9-c94e-4ef1-90e9-37f274a0ff58 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 7s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/4gi3rowajk2n4

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #964

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/964/display/redirect>

Changes:


------------------------------------------
[...truncated 63.29 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, urllib3, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0324060935-629874fa_d593b918-a99c-4567-b148-6f2f33cb84b2 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 12s
64 actionable tasks: 50 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/pqouzxeoe6vie

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #963

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/963/display/redirect?page=changes>

Changes:

[github] Merge pull request #11074: Store logical type values in Row instead of


------------------------------------------
[...truncated 62.75 KB...]
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Already using interpreter /usr/bin/python3.7
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly


if [ "${BASH_SOURCE-}" = "$0" ]; then
    echo "You must source this script: \$ source $0" >&2
    exit 33
fi

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, certifi, urllib3, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0324011725-68afd65f_245ca12c-e644-4ae8-b24a-21ff1a716d3a failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 9s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/s5i6s33ljnlsg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #962

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/962/display/redirect>

Changes:


------------------------------------------
[...truncated 62.99 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, chardet, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0324001029-a429a5c0_732b22d5-c32f-4a7e-bef9-5d8e3cf095a8 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 29s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/nke636vdeqnzc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #961

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/961/display/redirect?page=changes>

Changes:

[github] Merge pull request #11198 from [BEAM-7923] Obfuscates display ids


------------------------------------------
[...truncated 62.63 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, chardet, idna, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0323220815-4c866a5f_e4fa5c45-3291-4728-a2a0-760a861962d2 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 51s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/orfaqxwgrfqdc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #960

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/960/display/redirect?page=changes>

Changes:

[github] optionally import grpc (#11187)

[github] [BEAM-9305] Allow value provider query strings in _CustomBigQuerySource


------------------------------------------
[...truncated 62.92 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, certifi, urllib3, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0323211014-3f793ac7_00b29f7d-ae45-4ceb-ba80-4bc5953c89fa failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 20s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ksssu4bbn4cm6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #959

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/959/display/redirect?page=changes>

Changes:

[lcwik] [BEAM-9430] Fix coder sent to Dataflow service for non-portable

[github] Merge pull request #10990: [BEAM-9569] disable coder inference for rows


------------------------------------------
[...truncated 63.12 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, chardet, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0323201539-6df9f0d4_0c6a9daf-ac56-4d2a-8b85-99eb1b9fe758 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 23s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ytudsrsy563z2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #958

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/958/display/redirect>

Changes:


------------------------------------------
[...truncated 63.06 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, certifi, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0323182227-dd4ea87_5d1d8f32-c852-4c64-a19f-e16690053d05 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 41s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/z3lgvcmz64kwo

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #957

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/957/display/redirect?page=changes>

Changes:

[lcwik] [BEAM-9565] Fix threading issue with WatermarkEstimatorsTest


------------------------------------------
[...truncated 62.93 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, certifi, idna, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0323165428-9c01ec36_d89d5d8f-2481-4a60-ab19-770ca500330f failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 28s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/k25emtoltfyp2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #956

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/956/display/redirect?page=changes>

Changes:

[piotr.szuberski] [BEAM-9563] Change ToListCombineFn access level to private


------------------------------------------
[...truncated 65.15 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, idna, certifi, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0323154239-e80adf7a_ae986cab-8532-41b4-b42a-c24e1dc3aad4 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
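
The IllegalArgumentException above appears to be raised on the Java side
while it inspects its own classloader to assemble the output executable jar:
it expects the job server to have been launched from exactly one (shaded)
jar. A hedged diagnostic sketch, assuming shell access on the worker and
that JOB_SERVER_JAR is still set by the Gradle test:

# Confirm a single shaded job-server jar with a manifest is being passed in,
# and show how any running job-server JVM was actually launched.
ls -l "$JOB_SERVER_JAR"
unzip -p "$JOB_SERVER_JAR" META-INF/MANIFEST.MF | head -n 5
jps -lvm | grep -i job-server || true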

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 11m 19s
64 actionable tasks: 59 executed, 4 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/xctffvzvrozkw
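
For a local repro of the failing task with the extra logging Gradle suggests
above, a hedged sketch (assumes a Beam source checkout; the task name is
taken from the failure message):

cd <beam-checkout>   # hypothetical path to the Beam repository root
./gradlew :runners:spark:job-server:testJavaJarCreatorPy37 --stacktrace --info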

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #955

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/955/display/redirect>

Changes:


------------------------------------------
[...truncated 63.51 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, chardet, idna, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0323120912-dfe19d2e_8429c534-8a0a-4d52-9ccb-b68f72f7ca68 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 55s
64 actionable tasks: 50 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/d626x7fkecwm2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #954

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/954/display/redirect>

Changes:


------------------------------------------
[...truncated 62.62 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, urllib3, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0323061802-f11e3cc_7730e6ea-bbf1-4389-b59c-f7942f70ac89 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 41s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ajnqcqdsfcvj4

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #953

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/953/display/redirect?page=changes>

Changes:

[relax] switch cogbk to use Beam transform

[relax] finish join

[relax] support side-input joins

[relax] support side-input joins

[relax] spotless

[relax] make FieldAccessDescriptor always be field-insertion order

[relax] fix side-input joins

[relax] fix bug

[relax] remove obsolete test

[relax] add javadoc

[relax] add unit tests

[relax] update sql transform


------------------------------------------
[...truncated 63.09 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, urllib3, certifi, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0323055926-2a336e00_2d17f26e-1da8-4ee1-835f-7037ddf6d885 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 7s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/rnbaue2q4xc6q

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #952

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/952/display/redirect>

Changes:


------------------------------------------
[...truncated 63.08 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, chardet, certifi, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0323000917-4754bb09_cccec07a-ff20-4af2-8c5a-c6ecc9010dfe failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 57s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/wanqsot7frxoo

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #951

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/951/display/redirect>

Changes:


------------------------------------------
[...truncated 62.84 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, idna, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0322180904-cc43f407_5372ff42-b035-45a5-94bc-47b52d0d0ce9 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
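
The IllegalArgumentException comes from the Java job server process; the Python traceback only relays the terminal FAILED state. As a purely hypothetical pre-flight check (it is not part of the test script and does not by itself explain the class-loader error), one could confirm that the value handed to --spark_job_server_jar names exactly one existing file before submitting:

import glob
import os
import sys

def check_job_server_jar(path_or_glob):
    # Illustrative helper: $JOB_SERVER_JAR is expected to resolve to a single jar file.
    matches = glob.glob(path_or_glob)
    if len(matches) != 1 or not os.path.isfile(matches[0]):
        raise ValueError('expected exactly one job server jar, got %r' % matches)
    return matches[0]

if __name__ == '__main__':
    print(check_job_server_jar(sys.argv[1]))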

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi
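
When the jar-creation step succeeds (it did not in this run), OUTPUT_JAR should be a self-contained executable jar that the java -jar line above then runs. A small illustrative check, assuming such a jar exists, is to list a few of its entries and its manifest with the standard-library zipfile module before executing it:

import sys
import zipfile

# Usage (hypothetical): python inspect_jar.py flink-test-20200322-180904.jar
with zipfile.ZipFile(sys.argv[1]) as jar:
    for name in jar.namelist()[:15]:
        print(name)
    if 'META-INF/MANIFEST.MF' in jar.namelist():
        print(jar.read('META-INF/MANIFEST.MF').decode('utf-8', errors='replace'))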

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 44s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/zxl2mv4dbnaga

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #950

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/950/display/redirect>

Changes:


------------------------------------------
[...truncated 62.62 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, chardet, urllib3, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0322120841-b468171c_d9f01b51-b52e-4d79-84ff-3fb1ca8b2f02 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
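
The command-line flags in the step above are ordinary Beam pipeline options (the unrecognised --parallelism flag is what triggers the "Discarding unparseable args" warning). Expressed directly in Python they would look roughly like the sketch below; the runner name and all paths are placeholders, since the actual $RUNNER and $JOB_SERVER_JAR values are not shown in this log:

from apache_beam.options.pipeline_options import PipelineOptions

# Only the option names are taken from the command above; every value is illustrative.
options = PipelineOptions([
    '--runner=SparkRunner',                      # assumed for the Spark postcommit
    '--spark_job_server_jar=/path/to/job-server.jar',
    '--output_executable_path=output.jar',
    '--sdk_worker_parallelism=1',
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
])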

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 23s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/rgpjkp6va5a4c

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #949

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/949/display/redirect>

Changes:


------------------------------------------
[...truncated 62.84 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, chardet, idna, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0322060900-3cca4d26_f6802f20-c0ba-4377-b83a-adbb8e791f9b failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 40s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/5czv2tg3y7wyk

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #948

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/948/display/redirect>

Changes:


------------------------------------------
[...truncated 62.62 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, urllib3, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0322000842-b1c5c61e_a18f6f27-2e03-4861-9ec9-ec6c4613ac64 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 25s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/yxw5vpbtju6pw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #947

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/947/display/redirect>

Changes:


------------------------------------------
[...truncated 63.07 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, certifi, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0321180918-133d2955_9bef765e-3879-4217-b19f-9407926d843d failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
>>> FAILURE
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 59s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/jpw5azk3yysty

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #946

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/946/display/redirect>

Changes:


------------------------------------------
[...truncated 62.84 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, certifi, chardet, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0321120858-75d71e75_2c0fda4d-d4ed-4194-8a64-14196443efe6 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
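
The IllegalArgumentException above comes from the jar-creation step: the message
says the Java side expected exactly one jar on the job server's system classloader,
which suggests the job server was started from a multi-entry classpath rather than
a single shaded jar. A hedged first debugging step (hypothetical, not part of the
script) is to confirm that the artifact passed via --spark_job_server_jar really is
one self-contained jar:

# Hypothetical debugging commands: inspect the job server artifact handed to the test.
ls -lh "$JOB_SERVER_JAR"
unzip -l "$JOB_SERVER_JAR" | head -n 5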

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org
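
To reproduce this failure outside Jenkins, the same task can be run from a local
Beam checkout (assuming the standard Gradle wrapper at the repository root),
following the --stacktrace suggestion above:

# Hypothetical local reproduction; the task name is taken from the failure above.
./gradlew :runners:spark:job-server:testJavaJarCreatorPy37 --stacktrace --info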

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 39s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/w7lqt24gkk66w

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #945

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/945/display/redirect>

Changes:


------------------------------------------
[...truncated 62.99 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, urllib3, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0321061003-eb08f0d_3ad71088-e885-4c7d-84c3-8b5df0a6bd79 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 43s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/o2tndmozxb5uu

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #944

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/944/display/redirect?page=changes>

Changes:

[ehudm] [BEAM-8280] Enable type hint annotations

[robertwb] [BEAM-9558] Add an explicit end field to the data channel protos.

[robertwb] [BEAM-9558] Regenerate go protos.

[robertwb] [BEAM-9558] Produce and respect data channel end bit in runners and

[github] Merge pull request #11153 from [BEAM-9537] Adding a new module for


------------------------------------------
[...truncated 63.19 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, chardet, urllib3, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0321001526-dbb7052d_8cab313a-8aaa-46ec-9ade-8b23181cef91 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 52s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/7qq3bkpel77me

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #943

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/943/display/redirect?page=changes>

Changes:

[daniel.o.programmer] [BEAM-3301] Bugfix in DoFn validation.


------------------------------------------
[...truncated 62.86 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, certifi, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0320223914-aeeb73a3_772ae11b-49f3-4ed4-a016-96a9d55e7a13 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 51s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/mnr3y753upsjs

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #942

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/942/display/redirect?page=changes>

Changes:

[lcwik] [BEAM-9339, BEAM-2939] Drop splittable field from proto, add splittable

[boyuanz] Add Timer to Elements proto representation.


------------------------------------------
[...truncated 63.08 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, certifi, urllib3, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0320194502-2d0821ad_4d94ff85-4dc3-48f8-814d-a3ee853e7068 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
>>> FAILURE
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 39s
64 actionable tasks: 50 executed, 13 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/dsfkzcldvwqnw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #941

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/941/display/redirect>

Changes:


------------------------------------------
[...truncated 62.61 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, idna, chardet, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
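
# The shell invocation above, restated as a hedged Python-only sketch: the same
# flags become PipelineOptions entries, and --output_executable_path asks the
# portable Spark runner to package the pipeline and job server into a runnable
# jar instead of executing the job. Both file paths below are hypothetical; in
# the test they come from the local Gradle build and the date-stamped OUTPUT_JAR.
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    '--runner=SparkRunner',
    '--spark_job_server_jar=/path/to/beam-runners-spark-job-server.jar',  # hypothetical path
    '--output_executable_path=/tmp/spark-test.jar',                       # hypothetical path
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
])

with beam.Pipeline(options=options) as p:
    _ = p | beam.Create([0, 1, 2]) | beam.Map(lambda x: x + 1)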
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0320184111-f21d4b82_367343a3-5cd5-4b6f-9095-2069f7e338d7 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
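
# Hedged sketch of how the failure above surfaces on the client side: the
# portable runner's wait_until_finish() raises RuntimeError carrying the
# terminal state and the job server's last error message (here the
# "Expected exactly one jar" IllegalArgumentException). The job endpoint and
# environment below are assumptions for illustration, not the test's setup.
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    '--runner=PortableRunner',
    '--job_endpoint=localhost:8099',   # assumption: a job server is listening here
    '--environment_type=LOOPBACK',
])

pipeline = beam.Pipeline(options=options)
_ = pipeline | beam.Create([0, 1, 2]) | beam.Map(lambda x: x + 1)

try:
    pipeline.run().wait_until_finish()
except RuntimeError as e:
    # e.g. "Pipeline BeamApp-... failed in state FAILED: java.lang.IllegalArgumentException: ..."
    print('Portable job failed:', e)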

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 56s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/iykfkr5w3memo

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #940

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/940/display/redirect?page=changes>

Changes:

[kawaigin] Remove the excessive logging from capturable sources property.


------------------------------------------
[...truncated 62.85 KB...]
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, chardet, idna, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Waiting for grpc channel to be ready at localhost:37623.
WARNING:root:Waiting for grpc channel to be ready at localhost:37623.
WARNING:root:Waiting for grpc channel to be ready at localhost:37623.
WARNING:root:Waiting for grpc channel to be ready at localhost:37623.
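
# Hedged sketch of what the repeated "Waiting for grpc channel to be ready"
# warnings above correspond to: before submitting the job, the Python client
# polls the job server's gRPC endpoint until the channel becomes usable. The
# endpoint simply mirrors the port from this log; the timeout is an assumption.
import grpc

channel = grpc.insecure_channel('localhost:37623')
try:
    grpc.channel_ready_future(channel).result(timeout=60)
    print('job server endpoint is ready')
except grpc.FutureTimeoutError:
    print('job server endpoint never became ready')
finally:
    channel.close()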
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0320174356-acb94201_db381e99-5f64-4b58-9564-7073f5eb250e failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 11m 19s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/rkp4lc2bsoqjm

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #939

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/939/display/redirect>

Changes:


------------------------------------------
[...truncated 62.89 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0320120925-13d94823_6b554a6c-8954-47ee-99d5-fc5e0631a9db failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
>>> FAILURE
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 6s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/aca7ixfy5jxp2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #938

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/938/display/redirect?page=changes>

Changes:

[lcwik] [BEAM-9430] Update CHANGES.md to reflect removal of


------------------------------------------
[...truncated 63.60 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, chardet, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0320094704-269dd72d_94860e75-073b-4cfa-b46d-c883859100bb failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 51s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/trcxew2lzj2ak

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #937

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/937/display/redirect>

Changes:


------------------------------------------
[...truncated 63.07 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, chardet, certifi, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0320060915-484e75ba_a58c7ae0-9925-4235-bbb7-fa0dd8328e1e failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 53s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ureplc36gcszs

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #936

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/936/display/redirect?page=changes>

Changes:

[lcwik] [BEAM-4374] Define the protos for a "short" id mechanism for metrics

[lcwik] fixup! Address PR comments.


------------------------------------------
[...truncated 64.71 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, urllib3, chardet, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0320044942-8308e8d5_8f11e06a-b560-43f2-8fac-7a6051d27cbd failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
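The exception above is reported from the Java job-server side; the Python traceback only relays the terminal job state. As a purely hypothetical guard (sketched here for illustration, not part of the logged script, and not guaranteed to catch the Java classpath problem itself; JOB_SERVER_JAR names the same value the script passes via --spark_job_server_jar), the harness could assert the single-jar assumption before submitting:

# Hypothetical pre-flight check, not part of the Jenkins script: fail fast with
# a clear message if the job server jar argument is missing or ambiguous.
import glob
import os
import sys

job_server_jar = os.environ.get("JOB_SERVER_JAR", "")
matches = glob.glob(job_server_jar) if job_server_jar else []
if len(matches) != 1 or not os.path.isfile(matches[0]):
    sys.exit("Expected exactly one job server jar, got %r -> %r"
             % (job_server_jar, matches))
print("Using job server jar:", matches[0])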

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE
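The "|| TEST_EXIT_CODE=$?  # don't fail fast here; clean up before exiting" idiom above (record the failure status, always run the cleanup steps, then exit with the recorded status) has a straightforward Python analogue. A minimal sketch with hypothetical stand-in helpers, for readers porting the harness:

# Minimal sketch of the capture-then-clean-up pattern used by the test script.
# run_pipeline and clean_up are hypothetical stand-ins for the real steps.
import subprocess
import sys

def run_pipeline():
    # Placeholder command; the real harness runs python -c "$PIPELINE_PY" with runner flags.
    return subprocess.call([sys.executable, "-c", "print('pipeline placeholder')"])

def clean_up():
    # Placeholder; the real harness removes the virtualenv and the output jar.
    print("cleaning up temporary environment and output jar")

exit_code = 0
try:
    exit_code = run_pipeline()
finally:
    clean_up()  # always runs, mirroring the rm -rf / rm -f lines above
sys.exit(exit_code)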

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 44s
64 actionable tasks: 56 executed, 7 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/2jwmaau65og5c

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #935

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/935/display/redirect?page=changes>

Changes:

[github] Merge pull request #11128 from [BEAM-9524] Fix for ib.show() executing


------------------------------------------
[...truncated 63.34 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, urllib3, certifi, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0320021232-f9c14ab9_0bb86708-8acd-49fc-b49f-10100756e2b9 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 12m 47s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/mcv5r3w7c4izy

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #934

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/934/display/redirect?page=changes>

Changes:

[daniel.o.programmer] [BEAM-3301] Perform SDF validation (missing RestrictionTrackers).


------------------------------------------
[...truncated 63.12 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, idna, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0320005730-25d7c5ce_29bbffad-c3e9-4319-bc58-8db5e1a42f4c failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 49s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/v6iv5ttdlqu22

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #933

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/933/display/redirect>

Changes:


------------------------------------------
[...truncated 63.09 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, urllib3, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0320001546-7e77c516_a0ce666d-ef35-4acb-b9da-a563f45d5ca5 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 9s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/c5xe6inpcbx2q

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #932

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/932/display/redirect?page=changes>

Changes:

[github]  [BEAM-9552] Bump TestPubsub subscription creation ACK deadline to 60s


------------------------------------------
[...truncated 63.36 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319235324-622ce3f1_704d19cc-12a1-4644-a7d1-44aadb02055a failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 31s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/hcctrw3tfeb66

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #931

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/931/display/redirect?page=changes>

Changes:

[github] fix typo at Python Package name (#11098)


------------------------------------------
[...truncated 62.83 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, chardet, idna, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319231323-c031e28a_9f80a3f8-1b1d-4f8f-8f54-7de7fcd04ccf failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
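
The IllegalArgumentException above suggests that the Java process assembling the executable jar scans its classloader and insists on finding exactly one jar there. One cheap thing to rule out on the harness side is a malformed $JOB_SERVER_JAR value; the following is a hypothetical pre-flight check (the environment-variable lookup and the check itself are illustrative and not part of the test script).

import glob
import os
import sys

# Hypothetical check: the configured job server jar should resolve to exactly
# one existing .jar file before it is handed to --spark_job_server_jar.
job_server_jar = os.environ.get("JOB_SERVER_JAR", "")
matches = [p for p in glob.glob(job_server_jar)
           if p.endswith(".jar") and os.path.isfile(p)]
if len(matches) != 1:
    sys.exit("Expected exactly one job server jar, got: %r" % matches)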

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 23s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/2qewnwmx54r62

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #930

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/930/display/redirect?page=changes>

Changes:

[github] Merge pull request #11166 from [BEAM-7923] Emit info when capture


------------------------------------------
[...truncated 63.89 KB...]
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, idna, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Waiting for grpc channel to be ready at localhost:40695.
WARNING:root:Waiting for grpc channel to be ready at localhost:40695.
WARNING:root:Waiting for grpc channel to be ready at localhost:40695.
WARNING:root:Waiting for grpc channel to be ready at localhost:40695.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319220111-93c4d983_44b91bc5-0a09-4f92-9f40-79be18fa9bdb failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
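
The repeated "Waiting for grpc channel to be ready at localhost:40695" warnings in this run appear to be the Python client waiting for the freshly started job server endpoint before submitting the pipeline. A minimal sketch of that kind of readiness wait, using the standard grpcio API (the endpoint and timeout are placeholders):

import grpc

channel = grpc.insecure_channel("localhost:40695")
try:
    # Block until the channel is connected, or give up after 60 seconds.
    grpc.channel_ready_future(channel).result(timeout=60)
except grpc.FutureTimeoutError:
    raise RuntimeError("job server did not become reachable in time")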

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
>>> FAILURE
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 11m 0s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/qpa2ukwrrvv6i

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #929

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/929/display/redirect?page=changes>

Changes:

[crites] Clean up of TestStreamTranscriptTests. Removes check for final field in

[crites] Adds clearing of pane info state when windows get merged away.


------------------------------------------
[...truncated 62.64 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev
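
The grep above only confirms that the SDK container tag exists in the local Docker image cache. For reference, a rough Python equivalent of the same check, driving the Docker CLI through subprocess (the image name is taken from the log output; everything else is illustrative):

import subprocess

image = "apache/beam_python3.7_sdk:2.21.0.dev"
tags = subprocess.check_output(
    ["docker", "images", "--format", "{{.Repository}}:{{.Tag}}"], text=True)
if image not in tags.splitlines():
    raise SystemExit("SDK container %s has not been built" % image)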

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, chardet, urllib3, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319193351-46ec6ee7_a0adfc08-8a32-4c1c-beaf-d2422c2b0656 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 52s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/aph4fyzpyrawg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #928

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/928/display/redirect?page=changes>

Changes:

[iemejia] Move CHANGES template related items into template section


------------------------------------------
[...truncated 63.14 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, chardet, urllib3, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319183710-4e889c7a_b0dee696-7314-4699-909b-240d68eef827 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
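
The shell invocation above can also be expressed as a plain list of pipeline options, which makes it easier to reproduce the failure outside the Gradle harness. A sketch under the assumption that $RUNNER expands to SparkRunner; the jar path and output name are placeholders mirroring the flags shown above.

from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    '--runner=SparkRunner',                                   # $RUNNER (assumed)
    '--spark_job_server_jar=/path/to/spark-job-server.jar',   # $JOB_SERVER_JAR (placeholder)
    '--output_executable_path=flink-test.jar',                # $OUTPUT_JAR (placeholder)
    '--sdk_worker_parallelism=1',
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
])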

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 21s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/u2zute5mpdmvk

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #927

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/927/display/redirect?page=changes>

Changes:

[lcwik] [BEAM-9430] Migrate from ProcessContext#updateWatermark to


------------------------------------------
[...truncated 514.43 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, certifi, urllib3, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Waiting for grpc channel to be ready at localhost:59787.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319174155-c3120f0d_758ae3e1-e958-423c-ac57-a02e9fb4f1b5 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
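
"Discarding unparseable args" is logged for --parallelism presumably because that flag is only defined for the Flink runner's options, so the Spark path keeps what it recognises and warns about the rest. The general pattern, illustrated with plain argparse (this is not Beam's option-parsing code):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--sdk_worker_parallelism', type=int)

# parse_known_args() returns the recognised options plus the leftovers.
known, unknown = parser.parse_known_args(
    ['--sdk_worker_parallelism', '1', '--parallelism', '1'])
if unknown:
    print('Discarding unparseable args: %s' % unknown)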

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR
>>> FAILURE

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 20m 34s
64 actionable tasks: 59 executed, 4 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/7ritxo7rv5sio

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #926

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/926/display/redirect?page=changes>

Changes:

[lcwik] [BEAM-9540] Rename beam:source:runner:0.1/beam:sink:runner:0.1 to

[robertwb] [BEAM-9535] Remove unused ParDoPayload.Parameters.

[robertwb] [BEAM-9339] Declare capabilities in the Java SDK.

[robertwb] [BEAM-9339] Add additional Java capabilities.


------------------------------------------
[...truncated 62.70 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, urllib3, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319164529-6634c014_3f3e44bf-0a38-405b-8b09-4140f17e4bb3 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
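
When this step fails, one quick local sanity check (hypothetical, not something the logged script runs) is to confirm that the job server jar argument names a single readable jar before digging into the classloader error above:

# Hypothetical check only: show the path and the first few archive entries.
ls -l "$JOB_SERVER_JAR"
unzip -l "$JOB_SERVER_JAR" | head -n 5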

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
>>> FAILURE
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
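
The clean-up-before-exiting pattern above (capture the exit code, remove $ENV_DIR and $OUTPUT_JAR, then exit) can also be expressed with a shell trap; the lines below are only a sketch of that idiom under the same variable names, not how the logged script is written:

# Sketch: register cleanup once; the EXIT trap runs it on every exit path,
# whether the jar creation and execution succeed or fail.
cleanup() {
  rm -rf "$ENV_DIR"
  rm -f "$OUTPUT_JAR"
}
trap cleanup EXIT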

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 3s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/qhs37nhfwzmks

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #925

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/925/display/redirect?page=changes>

Changes:

[github] [BEAM-9551] Environment PB Pointer cleanup (#11164)


------------------------------------------
[...truncated 62.82 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev
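
If the grep above had found no matching image, the container would need to be built locally first; the Gradle task path below is an assumption based on the SDK image name shown here, not something taken from this log:

# Assumed (hypothetical) target for building the Python 3.7 SDK container image.
./gradlew :sdks:python:container:py37:docker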

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, chardet, urllib3, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319160256-4f509450_774b8a44-6755-42e9-bfcd-c74779232495 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 11s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/3ybn54soqequ4

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #924

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/924/display/redirect?page=changes>

Changes:

[jfarr1] [BEAM-9470] fix flaky unit test in :sdks:java:io:kinesis


------------------------------------------
[...truncated 62.56 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, chardet, urllib3, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319133431-a0368bf6_400d421a-c09a-4cb3-a7e8-8603bfcb5a62 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 53s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/wkomwfhqzmrek

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #923

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/923/display/redirect>

Changes:


------------------------------------------
[...truncated 63.35 KB...]

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, certifi, chardet, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Waiting for grpc channel to be ready at localhost:60817.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319121609-930127f4_e84cd066-6167-4f76-a3ef-49f9aaea3e67 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 13s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/wosv5rxnr5iem

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #922

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/922/display/redirect?page=changes>

Changes:

[iemejia] [BEAM-9279] Refactor HBase to diminish relying on Serializable wrappers

[iemejia] [BEAM-9279] Make HBase.ReadAll based on Reads instead of HBaseQuery


------------------------------------------
[...truncated 63.12 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, certifi, chardet, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319111923-1d63817a_b607ceee-f9fd-49a1-a6ca-b333388e620d failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 44s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/rpaapgaqysabc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #921

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/921/display/redirect?page=changes>

Changes:

[kcweaver] [BEAM-9553] Use latest Flink job server image as default.


------------------------------------------
[...truncated 63.10 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, idna, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319094752-96dc8b63_da419c2b-9085-4544-abfa-a89b52ea1eeb failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
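
Note: the IllegalArgumentException above appears to be raised on the Java side while the executable jar is being assembled, when the class loader being inspected does not contain exactly one jar. A loose pre-flight check of the script's own input is sketched below, on the assumption that JOB_SERVER_JAR is exported into the environment; the variable name simply mirrors the shell script, and the check is illustrative only and does not reproduce the class-loader inspection itself. It can at least rule out an obviously wrong jar path.

# Sketch: confirm that the path handed to --spark_job_server_jar is one readable jar.
import os
import sys
import zipfile

job_server_jar = os.environ.get('JOB_SERVER_JAR', '')

if not (os.path.isfile(job_server_jar) and job_server_jar.endswith('.jar')):
    sys.exit('JOB_SERVER_JAR does not point at a single .jar file: %r' % job_server_jar)
if not zipfile.is_zipfile(job_server_jar):
    sys.exit('JOB_SERVER_JAR is not a readable jar archive: %r' % job_server_jar)
print('Job server jar looks plausible:', job_server_jar)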

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi
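
Note: when the jar-creation step does succeed, a quick way to confirm that the generated jar is actually runnable before invoking java -jar is to check its manifest for a Main-Class entry. The sketch below uses only the Python standard library; the jar path argument mirrors $OUTPUT_JAR and is otherwise illustrative.

# Sketch: verify the generated jar declares a Main-Class before executing it.
import sys
import zipfile

output_jar = sys.argv[1]  # e.g. the value of $OUTPUT_JAR

with zipfile.ZipFile(output_jar) as jar:
    manifest = jar.read('META-INF/MANIFEST.MF').decode('utf-8', errors='replace')

if 'Main-Class:' not in manifest:
    sys.exit('No Main-Class entry in %s; java -jar would not run it' % output_jar)
print('Main-Class entry found; %s should be runnable with java -jar' % output_jar)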

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 39s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/37e5hweshny2k

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #920

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/920/display/redirect?page=changes>

Changes:

[alex] [BEAM-9035] BIP-1: Typed options for Row Schema and Field


------------------------------------------
[...truncated 515.95 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407>"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, certifi, chardet, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319081659-ecdc6201_08e21252-b56c-44de-9da6-b8cbbf94d524 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 58s
64 actionable tasks: 62 executed, 1 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/dxydel7rhspuu

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #919

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/919/display/redirect?page=changes>

Changes:

[coheigea] BEAM-8924 - Update Apache Tika to 1.24


------------------------------------------
[...truncated 62.64 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407>"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, idna, urllib3, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319072209-781c516a_52476a5a-6354-40d0-879e-33575512cfbf failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

>>> FAILURE
if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 10s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/rzenq2j4dtmpi

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #918

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/918/display/redirect>

Changes:


------------------------------------------
[...truncated 62.62 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407>"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, certifi, idna, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319061946-a77840de_e12922ab-13c2-418e-8cc2-c58d71399b40 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 57s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/5z2fgupgcysma

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #917

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/917/display/redirect?page=changes>

Changes:

[github] Merge pull request #11158 from [BEAM-9533] Fixing tox.ini variants


------------------------------------------
[...truncated 62.83 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407>"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, certifi, chardet, idna, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319050856-e9ac75cc_ab5c44e1-e1ac-4a41-b326-63585e087ca4 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 4s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/wi6hnsy4jt3mw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #916

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/916/display/redirect>

Changes:


------------------------------------------
[...truncated 63.08 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407>"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0319002817-a61a1b11_025167b9-186b-4504-ba7c-e3646f130dd7 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE
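
The IllegalArgumentException above is thrown while the executable jar is being assembled, before anything is submitted to Spark. For local debugging, the same jar-creation step can be driven directly from Python with the flags that appear in the trace above. The snippet below is only a sketch: it assumes $RUNNER resolves to SparkRunner for this job, and the job-server jar path, output path and image tag are placeholders, not values taken from this build.

# Minimal sketch of the jar-creation step (placeholder paths; not from this log).
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions

options = PipelineOptions([
    '--runner=SparkRunner',                                   # assumed value of $RUNNER
    '--spark_job_server_jar=/path/to/spark-job-server.jar',   # placeholder for $JOB_SERVER_JAR
    '--output_executable_path=/tmp/spark-test.jar',           # placeholder for $OUTPUT_JAR
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
])
options.view_as(SetupOptions).save_main_session = True

pipeline = beam.Pipeline(options=options)
_ = pipeline | beam.Create([0, 1, 2]) | beam.Map(lambda x: x + 1)

result = pipeline.run()       # with output_executable_path set, this step writes the jar
result.wait_until_finish()    # the jar is then run separately, e.g. java -jar /tmp/spark-test.jar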

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 1s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ir5ychwumre2s

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #915

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/915/display/redirect?page=changes>

Changes:

[apilloud] [BEAM-9511] Uncollect takes arbitrary expressions

[apilloud] [BEAM-9515] Add test


------------------------------------------
[...truncated 63.10 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407>"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, urllib3, idna, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0318232859-15c24afb_8de23de5-9066-43b8-9874-04ecf9af409b failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE
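
Each of these runs dies at the same point: wait_until_finish() raising once the portable job reports FAILED, which is why the shell harness only sees a non-zero exit code. Below is a hedged sketch of handling that exception explicitly; runner options are omitted so the fragment stays self-contained (with the default DirectRunner the except branch is simply never taken).

# Sketch only: catch the RuntimeError the portable runner raises for FAILED jobs.
import apache_beam as beam

pipeline = beam.Pipeline()  # in the test above this would carry the Spark/portable options
_ = pipeline | beam.Create([0, 1, 2]) | beam.Map(lambda x: x + 1)

try:
    result = pipeline.run()
    result.wait_until_finish()
except RuntimeError as exc:
    # For portable runners the message carries the job id, terminal state and
    # last error, e.g. the IllegalArgumentException seen in these builds.
    print('job failed: %s' % exc)
    raise SystemExit(1)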

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 52s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/pgwuce4iriqna

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #914

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/914/display/redirect?page=changes>

Changes:

[apilloud] [BEAM-7832] Translate ZetaSQL joins without condition


------------------------------------------
[...truncated 62.44 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407>"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, idna, chardet, certifi, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0318230242-1961daf5_d84cd2c8-a064-4c2f-af4a-78840c030745 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 46s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/z36a6z4yqdgcw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #913

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/913/display/redirect?page=changes>

Changes:

[kawaigin] [BEAM-7923] Change Transform Label Prefix Syntax

[github] Specify return types of window start/end functions explicitly (#11152)


------------------------------------------
[...truncated 62.91 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407>"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, chardet, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0318220535-e414b0aa_88713ea7-1e06-4390-b6f7-bb4913c54b51 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 21s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/3vf3vblatw3dk

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #912

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/912/display/redirect>

Changes:


------------------------------------------
[...truncated 63.10 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407>"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, certifi, chardet, urllib3, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0318190757-80e12464_8de008a2-334a-41f0-bd22-4ccdfa1df4b3 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 53s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/kjanntclc4kme

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #911

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/911/display/redirect?page=changes>

Changes:

[github] Merge pull request #11147 from [BEAM-7923] Support dict and iterable


------------------------------------------
[...truncated 62.91 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407>"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, chardet, idna, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0318180728-481eb468_b4c6bde9-73e8-4f6c-924b-e56edd3b9cc9 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
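
For comparison, a hedged, self-contained sketch (not the CI script) of how the shell flags above map onto PipelineOptions when building the executable jar from Python, and of catching the RuntimeError that wait_until_finish() raises when the job ends in state FAILED, as it does above. The runner name assumes the Python SDK's portable SparkRunner wrapper; the jar path and output path are hypothetical placeholders.

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    '--runner=SparkRunner',                                   # assumed portable Spark wrapper
    '--spark_job_server_jar=/path/to/spark-job-server.jar',   # hypothetical placeholder
    '--output_executable_path=/tmp/spark-test.jar',           # hypothetical placeholder
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.21.0.dev',
])

pipeline = beam.Pipeline(options=options)
_ = pipeline | beam.Create([0, 1, 2]) | beam.Map(lambda x: x + 1)

result = pipeline.run()
try:
    result.wait_until_finish()
except RuntimeError as err:
    # A terminal FAILED state surfaces here, e.g. the "Expected exactly one
    # jar ..." IllegalArgumentException reported by the job server above.
    print('pipeline failed: %s' % err)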

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 11s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/msavvd3o3n36s

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #910

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/910/display/redirect?page=changes>

Changes:

[github] [BEAM-9526] Add missing unmarshalling in top.LargestPerKey. (#11143)


------------------------------------------
[...truncated 63.55 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, docopt, urllib3, chardet, idna, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0318165014-f1cf4dc7_d82b76dc-655c-477f-81af-70def6e721f7 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
>>> FAILURE
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 52s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/hbrbgnnkcaxfi

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #909

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/909/display/redirect?page=changes>

Changes:

[github] [BEAM-9539] Fix copy-pasted comment in load-tests' build.gradle (#11155)


------------------------------------------
[...truncated 62.63 KB...]
Docker version 18.09.4, build d14af54

# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.21.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/50/24/4d/4580ca4a299f1ad6fd63443e6e584cb21e9a07988e4aa8daac/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/59/b1/91/f02e76c732915c4015ab4010f3015469866c1eb9b14058d8e7/dill-0.3.1.1-cp37-none-any.whl
Collecting fastavro<0.22,>=0.21.4
  Using cached fastavro-0.21.24-cp37-cp37m-manylinux1_x86_64.whl (1.2 MB)
Processing /home/jenkins/.cache/pip/wheels/8b/99/a0/81daf51dcd359a9377b110a8a886b3895921802d2fc1b2397e/future-0.18.2-cp37-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.27.2-cp37-cp37m-manylinux2010_x86_64.whl (2.7 MB)
Processing /home/jenkins/.cache/pip/wheels/fe/a7/05/23e3699975fc20f8a30e00ac1e515ab8c61168e982abe4ce70/hdfs-2.5.8-cp37-none-any.whl
Processing /home/jenkins/.cache/pip/wheels/6d/41/4b/2b369d6e2b7eaebcdd423516d3fb659c7658c16a2be8fd04ec/httplib2-0.12.0-cp37-none-any.whl
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.2-cp37-cp37m-manylinux1_x86_64.whl (20.2 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/48/f7/87/b932f09c6335dbcf45d916937105a372ab14f353a9ca431d7d/oauth2client-3.0.0-cp37-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.11.3-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2019.3-py2.py3-none-any.whl (509 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.1-py3-none-any.whl (20 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.16.0,>=0.15.1
  Using cached pyarrow-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl (59.2 MB)
Collecting six>=1.5.2
  Using cached six-1.14.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.4-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.0-py2.py3-none-any.whl (38 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.21.0.dev0) (46.0.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.6-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2019.11.28-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.8-py2.py3-none-any.whl (125 kB)
Installing collected packages: crcmod, dill, fastavro, future, six, grpcio, chardet, idna, certifi, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, pytz, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.21.24 future-0.18.2 grpcio-1.27.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.9 mock-2.0.0 numpy-1.18.2 oauth2client-3.0.0 pbr-5.4.4 protobuf-3.11.3 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.6 python-dateutil-2.8.1 pytz-2019.3 requests-2.23.0 rsa-4.0 six-1.14.0 typing-extensions-3.7.4.1 urllib3-1.25.8

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 545, in wait_until_finish
    (self._job_id, self._state, self._last_error_message()))
RuntimeError: Pipeline BeamApp-jenkins-0318125745-4a05e070_bb8d756c-8da4-49c6-b876-ae1ad7372757 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle'> line: 160

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 16s
64 actionable tasks: 49 executed, 14 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/mxpx6fxaszyvu

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
