Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2020/06/13 18:10:09 UTC

Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1515

See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1515/display/redirect>

Changes:


------------------------------------------
[...truncated 64.42 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py2.py3-none-any.whl (34 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, docopt, urllib3, chardet, idna, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0613181004-6035a435_6bea4066-9fde-4f26-baa8-012d4def3644 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 44s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/q4wueyuwozmgc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1530

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1530/display/redirect>

Changes:


------------------------------------------
[...truncated 64.37 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.29.0
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py3-none-any.whl (47 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.3.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, docopt, chardet, idna, certifi, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0617121024-8215bf52_835ea8ee-7afb-4f65-8084-e76b074c1cc1 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 58s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/j5iish5vcyrcg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1529

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1529/display/redirect?page=changes>

Changes:

[github] [BEAM-10217] CALL_FUNCTION and CALL_METHOD fixes (#11966)


------------------------------------------
[...truncated 64.16 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.29.0
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py3-none-any.whl (47 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.3.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, docopt, chardet, idna, urllib3, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0617060957-9ec19441_c5db55fb-9e53-4ca4-b97d-289bf3465faa failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 33s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ieb36xf3eme54

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1528

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1528/display/redirect?page=changes>

Changes:

[heejong] [BEAM-10208] add cross-language KafkaIO integration test

[rionmonster] added kotlin katas release blog post with associated images

[rionmonster] fixed up verbiage

[kcweaver] [BEAM-9852] Do not create data channel for empty timer descriptor.

[kcweaver] Fix state handler for missing service descriptor.

[github] Merge pull request #11838 from [BEAM-9322] Modify the TestStream to

[github] [BEAM-10251] Adds transform id to TestStream step (#12003)

[github] [BEAM-7672] Increase  the set of acceptable Python wheels in Beam Python

[github] Merge pull request #11790 from [BEAM-9926] Programming guide - Fix typos

[github] [BEAM-9679] Update Stepik course information (#12018)

[github] [BEAM-10169] ParDo functions with correct output N in their error


------------------------------------------
[...truncated 64.38 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.29.0
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py3-none-any.whl (47 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.3.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, docopt, urllib3, chardet, certifi, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0617000957-e82e3744_0b73c3d0-340b-40bb-82a3-88097535b958 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 32s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/jsvddjxzcjfg6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1527

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1527/display/redirect?page=changes>

Changes:

[mxm] [BEAM-10260] Fix continuation token support with statecache

[mxm] [BEAM-10260] Remove is_cached parameter from CachingStateHandler

[github] Merge pull request #11086 from [BEAM-8910] Make custom BQ source read


------------------------------------------
[...truncated 64.40 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.29.0
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py3-none-any.whl (47 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.3.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, certifi, chardet, idna, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0616181350-ac870f8_d94305f6-e4ca-4a67-858a-9338e97308a2 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 1s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/fzrzg4ehttbfe

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1526

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1526/display/redirect>

Changes:


------------------------------------------
[...truncated 64.38 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.29.0
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py3-none-any.whl (47 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.3.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, docopt, certifi, urllib3, idna, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0616121015-4468aeaa_f95f9f59-c557-4c18-a30f-1f4d4a0f34bf failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 52s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/5nqa6coq6rrqs

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1525

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1525/display/redirect?page=changes>

Changes:

[ningk] Update screen_diff deps and goldens as stable Chrome version advances.

[robinyqiu] Add zetaSqlValueToJavaObject() with unknown target type

[bhulette] Lump together PMC-only steps


------------------------------------------
[...truncated 64.15 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.29.0
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py3-none-any.whl (47 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.3.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, idna, urllib3, certifi, chardet, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0616061049-3de1195a_55343a56-effa-46cb-aacd-60db96031266 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 19s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/ffb7rnntnymvw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1524

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1524/display/redirect?page=changes>

Changes:

[davidyan] [BEAM-10247] Pin google-api-core to 1.17.0, because otherwise the pulled

[davidyan] Bumping grpcio version to 1.29.0 to be compatible with

[daniel.o.programmer] [BEAM-9951] Fixing some lint bugs.

[stuart.m.perks] BEAM-10221: Add in four tests cases of base on the java equivalent for

[davidyan] added rsa<4.1 for python2

[github] Clarify release guide for publishing release notes to GitHub (#12015)


------------------------------------------
[...truncated 64.16 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.29.0
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py3-none-any.whl (47 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.3.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, idna, chardet, urllib3, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0616001158-de2ba6f9_7b71677f-af20-4fbf-b5df-d291820e889e failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 46s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/rgtem6jpamtpo

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1523

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1523/display/redirect?page=changes>

Changes:

[mxm] [BEAM-10249] Populate state cache with initial values before appending


------------------------------------------
[...truncated 63.99 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Downloading rsa-4.6-py3-none-any.whl (47 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.2.0)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, chardet, idna, certifi, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0615181411-84f1a223_849e0d66-9ba2-4822-a14c-b360812fce9b failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 32s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/nroshhrpjbaem

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1522

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1522/display/redirect>

Changes:


------------------------------------------
[...truncated 64.40 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py2.py3-none-any.whl (34 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, chardet, idna, urllib3, certifi, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0615121012-5f7df7b0_5a045b0a-0124-47cc-8a79-3ca3018d4517 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 50s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/5us6pua6rssok

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1521

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1521/display/redirect>

Changes:


------------------------------------------
[...truncated 64.40 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py2.py3-none-any.whl (34 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, certifi, chardet, urllib3, idna, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0615061010-12039761_fbf36e1c-5d41-49bb-b20f-219fc5be91bf failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 47s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/uhkh5qgd7p4ma

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1520

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1520/display/redirect>

Changes:


------------------------------------------
[...truncated 64.40 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py2.py3-none-any.whl (34 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, idna, chardet, certifi, urllib3, requests, docopt, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0615001045-b41305fe_764bd01b-3829-408c-820e-1ef7bc388a38 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 23s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/eg6m6wvuyjyws

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1519

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1519/display/redirect>

Changes:


------------------------------------------
[...truncated 64.37 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py2.py3-none-any.whl (34 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, docopt, urllib3, chardet, idna, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0614181016-613dc47f_946b8af1-02c7-47b9-8f99-fbb3dbe51179 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
>>> FAILURE
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 53s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/o2vyp2wr5u4tw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1518

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1518/display/redirect>

Changes:


------------------------------------------
[...truncated 64.40 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py2.py3-none-any.whl (34 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, docopt, idna, chardet, certifi, urllib3, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0614121001-99fc95e7_cc9fdea1-aa43-4de2-9b5a-d72e6285f7c1 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 35s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/whvwbvmm4c3ro

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1517

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1517/display/redirect?page=changes>

Changes:

[github] [BEAM-9679] Add Partition task to Core Transform katas (#11979)


------------------------------------------
[...truncated 64.40 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py2.py3-none-any.whl (34 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, docopt, certifi, urllib3, idna, chardet, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0614061002-88e7ff01_5aaf97ab-f4c9-4875-abb8-9f403663beec failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
>>> FAILURE
fi
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 36s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/hciptlrehhvwk

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #1516

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/1516/display/redirect>

Changes:


------------------------------------------
[...truncated 64.42 KB...]
# Verify container has already been built
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
apache/beam_python3.7_sdk:2.23.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python3.7>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Processing /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f/crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Processing /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f/dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<0.24,>=0.21.4
  Using cached fastavro-0.23.4-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Processing /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0/future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.12.1
  Using cached grpcio-1.29.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Processing /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859/hdfs-2.5.8-py3-none-any.whl
Collecting httplib2<0.18.0,>=0.8
  Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
  Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
  Using cached numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Processing /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1/oauth2client-3.0.0-py3-none-any.whl
Collecting protobuf<4,>=3.5.0.post1
  Using cached protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Processing /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1/avro_python3-1.9.2.1-py3-none-any.whl
Collecting pyarrow<0.18.0,>=0.15.1
  Using cached pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting requests>=2.7.0
  Using cached requests-2.23.0-py2.py3-none-any.whl (58 kB)
Collecting pbr>=0.11
  Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.6-py2.py3-none-any.whl (34 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.5.0.post1->apache-beam==2.23.0.dev0) (47.1.1)
Collecting pyparsing>=2.1.4
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.9-py2.py3-none-any.whl (58 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
  Using cached urllib3-1.25.9-py2.py3-none-any.whl (126 kB)
Collecting chardet<4,>=3.0.2
  Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.4.5.2-py2.py3-none-any.whl (157 kB)
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, docopt, idna, urllib3, chardet, certifi, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, rsa, pyasn1-modules, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.4.5.2 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.4 future-0.18.2 grpcio-1.29.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.9 mock-2.0.0 numpy-1.18.5 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.23.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.9

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 546, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0614000956-f2c70f82_7c5e44f9-eba2-4054-80ca-bb652cc4ef67 failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
>>> FAILURE
exit $TEST_EXIT_CODE

> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle>' line: 166

* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 29s
67 actionable tasks: 51 executed, 15 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/3cdw4jgb2hgwy

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org