Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2021/04/22 00:24:46 UTC

Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #2764

See <https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/2764/display/redirect?page=changes>

Changes:

[sychen] Add a BQ option for configuring buffering duration when auto-sharding is

[suztomo] [BEAM-11010] Upgrading google-cloud-pubsublite to 0.13.2

[suztomo] [BEAM-11010] Copying SubscriberOptions from pubsublite repo

[suztomo] [BEAM-11010] Declaring flogger-system-backend to avoid conflicts

[Kyle Weaver] [BEAM-12194] Enable SqlTransform::registerUdaf in ZetaSQL.

[Kyle Weaver] [BEAM-12194] Code style changes from review.

[Boyuan Zhang] [BEAM-12114] Dataflow should apply KAFKA_READ_OVERRIDE when it's not

[noreply] Make sdk/worker_harness_container_image fully backwards compatible


------------------------------------------
[...truncated 629.40 KB...]
apache/beam_python3.7_sdk:2.30.0.dev

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
Using base prefix '/usr'
New python executable in <https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/2022703441/bin/python3.7>
Also creating executable in <https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/2022703441/bin/python>
Installing setuptools, pip, wheel...
done.
Running virtualenv with interpreter /usr/bin/python3.7
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/2022703441>"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Collecting avro-python3!=1.9.2,<1.10.0,>=1.8.1
  Using cached avro_python3-1.9.2.1-py3-none-any.whl
Collecting crcmod<2.0,>=1.7
  Using cached crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Collecting dill<0.3.2,>=0.3.1.1
  Using cached dill-0.3.1.1-py3-none-any.whl
Collecting fastavro<2,>=0.21.4
  Using cached fastavro-1.4.0-cp37-cp37m-manylinux2014_x86_64.whl (2.2 MB)
Collecting future<1.0.0,>=0.18.2
  Using cached future-0.18.2-py3-none-any.whl
Collecting grpcio<2,>=1.29.0
  Using cached grpcio-1.37.0-cp37-cp37m-manylinux2014_x86_64.whl (4.2 MB)
Collecting hdfs<3.0.0,>=2.1.0
  Using cached hdfs-2.6.0-py3-none-any.whl (33 kB)
Collecting httplib2<0.20.0,>=0.8
  Using cached httplib2-0.19.1-py3-none-any.whl (95 kB)
Collecting numpy<1.21.0,>=1.14.3
  Using cached numpy-1.20.2-cp37-cp37m-manylinux2010_x86_64.whl (15.3 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached pymongo-3.11.3-cp37-cp37m-manylinux2014_x86_64.whl (512 kB)
Collecting oauth2client<5,>=2.0.1
  Using cached oauth2client-4.1.3-py2.py3-none-any.whl (98 kB)
Collecting protobuf<4,>=3.12.2
  Using cached protobuf-3.15.8-cp37-cp37m-manylinux1_x86_64.whl (1.0 MB)
Collecting pyarrow<4.0.0,>=0.15.1
  Using cached pyarrow-3.0.0-cp37-cp37m-manylinux2014_x86_64.whl (20.7 MB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.2-py2.py3-none-any.whl (21 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
  Using cached pytz-2021.1-py2.py3-none-any.whl (510 kB)
Collecting requests<3.0.0,>=2.24.0
  Using cached requests-2.25.1-py2.py3-none-any.whl (61 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
  Using cached typing_extensions-3.7.4.3-py3-none-any.whl (22 kB)
Collecting six>=1.5.2
  Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Collecting docopt
  Using cached docopt-0.6.2-py2.py3-none-any.whl
Collecting pyparsing<3,>=2.4.2
  Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting pyasn1>=0.1.7
  Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting rsa>=3.1.4
  Using cached rsa-4.7.2-py3-none-any.whl (34 kB)
Collecting pyasn1-modules>=0.0.5
  Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2020.12.5-py2.py3-none-any.whl (147 kB)
Collecting idna<3,>=2.5
  Using cached idna-2.10-py2.py3-none-any.whl (58 kB)
Collecting urllib3<1.27,>=1.21.1
  Using cached urllib3-1.26.4-py2.py3-none-any.whl (153 kB)
Collecting chardet<5,>=3.0.2
  Using cached chardet-4.0.0-py2.py3-none-any.whl (178 kB)
Installing collected packages: urllib3, pyparsing, pyasn1, idna, chardet, certifi, six, rsa, requests, pyasn1-modules, numpy, httplib2, docopt, typing-extensions, pytz, python-dateutil, pymongo, pydot, pyarrow, protobuf, oauth2client, hdfs, grpcio, future, fastavro, dill, crcmod, avro-python3, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.12.5 chardet-4.0.0 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-1.4.0 future-0.18.2 grpcio-1.37.0 hdfs-2.6.0 httplib2-0.19.1 idna-2.10 numpy-1.20.2 oauth2client-4.1.3 protobuf-3.15.8 pyarrow-3.0.0 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.2 pymongo-3.11.3 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2021.1 requests-2.25.1 rsa-4.7.2 six-1.15.0 typing-extensions-3.7.4.3 urllib3-1.26.4
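
A quick sanity check at this point, not part of the original script, is to import the SDK from the freshly activated virtualenv; the editable install above should make the dev version resolvable:

# Hypothetical verification step (assumes the virtualenv is still active).
# The printed version should match the dev SDK installed just above.
python -c "import apache_beam; print(apache_beam.__version__)"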

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

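A note on what this pipeline tests: with a portable runner the Map lambda executes in a separate SDK worker (here, the Docker container), so the reference to global_var only resolves if the main session is pickled and staged with the pipeline, which is what save_main_session = True triggers; otherwise the worker raises a NameError, exactly as the comment in the pipeline warns. As a hedged local sketch (assuming the virtualenv above is active; DirectRunner runs in-process, so this checks pipeline wiring but not the main-session staging the portable test is really about):

# Hypothetical local smoke test, not part of the original script.
python -c "$PIPELINE_PY" --runner DirectRunner
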
if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR="test-pipeline-${RUNNER}-$(date +%Y%m%d-%H%M%S).jar"
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_options "docker_container_image=$PYTHON_CONTAINER_IMAGE" \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi
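
The jar written by --output_executable_path is self-contained: it bundles the serialized pipeline, the staged artifacts, and the job server, which is why a plain java -jar invocation can submit the job. A hypothetical inspection step (not in the original script) to confirm the jar was populated before running it:

# List a few entries from the generated jar (requires a JDK on PATH).
jar tf "$OUTPUT_JAR" | head -n 20
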
Exception in thread "main" java.lang.RuntimeException: Job BeamApp-jenkins-0422002213-30b6eb69_c0029942-9184-48d2-9168-f5b0ca2c2a99 failed.
	at org.apache.beam.runners.spark.SparkPipelineRunner.main(SparkPipelineRunner.java:270)
Caused by: java.lang.NullPointerException
	at org.apache.beam.runners.spark.SparkPipelineRunner.run(SparkPipelineRunner.java:120)
	at org.apache.beam.runners.spark.SparkPipelineRunner.main(SparkPipelineRunner.java:268)
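
The NullPointerException in SparkPipelineRunner.run suggests the job failed while being set up by the runner rather than in user code, and the same trace appears again in build #2765 below. A hypothetical retry wrapper (not part of the original script) can help tell a one-off flake from a persistent regression:

# Rerun the jar a few times before declaring failure; keep the last exit code.
for attempt in 1 2 3; do
  java -jar "$OUTPUT_JAR" && { TEST_EXIT_CODE=0; break; } || TEST_EXIT_CODE=$?
done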

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR
>>> FAILURE

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
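
Capturing $? into TEST_EXIT_CODE instead of exiting immediately is what lets the rm cleanup above always run before the script reports its status. An equivalent, slightly more idiomatic sketch uses a trap (a hypothetical alternative, not what the script actually does):

# Cleanup runs on any exit path, so intermediate failures need no special casing.
trap 'rm -rf "$ENV_DIR"; rm -f "$OUTPUT_JAR"' EXIT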

> Task :sdks:python:test-suites:portable:py37:testJavaJarCreatorSparkRunner FAILED

FAILURE: Build failed with an exception.

* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/test-suites/portable/common.gradle>' line: 211

* What went wrong:
Execution failed for task ':sdks:python:test-suites:portable:py37:testJavaJarCreatorSparkRunner'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings
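
To see exactly which deprecated features fired, the failing task can be rerun with the flag Gradle suggests (task path taken from this log; assumes a repo checkout):

./gradlew :sdks:python:test-suites:portable:py37:testJavaJarCreatorSparkRunner --warning-mode all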

BUILD FAILED in 24m 17s
89 actionable tasks: 79 executed, 8 from cache, 2 up-to-date
Gradle was unable to watch the file system for changes. The inotify watches limit is too low.
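
The inotify warning is unrelated to the test failure: Gradle's file-system watching simply needs more watches than the default Linux limit allows. A common remedy on Linux CI hosts (requires root; the exact value is a judgment call):

sudo sysctl fs.inotify.max_user_watches=524288
echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf  # persist across reboots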

Publishing build scan...
https://gradle.com/s/kumikl4mhbkcg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Jenkins build is back to normal : beam_PostCommit_PortableJar_Spark #2766

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/2766/display/redirect?page=changes>



Build failed in Jenkins: beam_PostCommit_PortableJar_Spark #2765

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/2765/display/redirect?page=changes>

Changes:

[noreply] [BEAM-12191] Add a test for python template generation with upload_graph

[noreply] [BEAM-7372] fix wrong usage of with_traceback (#14566)

[noreply] [BEAM-7372] cleanup codes for py2 from apache_beam/transforms (#14544)

[noreply] [BEAM-2085] Fixups for Python resource hints. (#14605)


------------------------------------------
[...truncated 62.26 KB...]
> Task :sdks:python:container:resolveBuildDependencies
Resolving ./github.com/apache/beam/sdks/go@<https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/go>

> Task :sdks:python:container:installDependencies
> Task :sdks:python:container:buildDarwinAmd64
> Task :sdks:python:container:buildLinuxAmd64
> Task :sdks:python:container:goBuild
> Task :sdks:python:container:py37:copyLauncherDependencies
> Task :sdks:python:container:py37:dockerPrepare
> Task :sdks:python:container:py37:docker

> Task :sdks:python:test-suites:portable:py37:testJavaJarCreatorSparkRunner

while [[ $# -gt 0 ]]
do
key="$1"
case $key in
    --job_server_jar)
        JOB_SERVER_JAR="$2"
        shift # past argument
        shift # past value
        ;;
    --runner)
        RUNNER="$2"
        shift # past argument
        shift # past value
        ;;
    --env_dir)
        ENV_DIR="$2"
        shift # past argument
        shift # past value
        ;;
    --python_root_dir)
        PYTHON_ROOT_DIR="$2"
        shift # past argument
        shift # past value
        ;;
    --python_version)
        PYTHON_VERSION="$2"
        shift # past argument
        shift # past value
        ;;
    --python_container_image)
        PYTHON_CONTAINER_IMAGE="$2"
        shift # past argument
        shift # past value
        ;;
    *)    # unknown option
        echo "Unknown option: $1"
        exit 1
        ;;
esac
done
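
This while/case loop is the standard two-shift pattern for long flag/value pairs: the first shift consumes the flag, the second its value, and anything unrecognized aborts the run. A hypothetical invocation showing the full set of expected flags (the script name and jar path are illustrative; the other values come from this log):

./run_portable_jar_test.sh \
  --job_server_jar runners/spark/job-server/build/libs/beam-runners-spark-job-server.jar \
  --runner SparkRunner \
  --env_dir build/gradleenv/2022703441 \
  --python_root_dir sdks/python \
  --python_version 3.7 \
  --python_container_image apache/beam_python3.7_sdk:2.30.0.dev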

# Go to the root of the repository
cd $(git rev-parse --show-toplevel)
git rev-parse --show-toplevel

# Verify docker command exists
command -v docker
docker -v

# Verify container has already been built
echo "Checking for Docker image ${PYTHON_CONTAINER_IMAGE}"
docker images --format "{{.Repository}}:{{.Tag}}" | grep $PYTHON_CONTAINER_IMAGE
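
If that grep comes back empty, the container needs to be built first; the Gradle task that produced it appears earlier in this log:

./gradlew :sdks:python:container:py37:docker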

# Set up Python environment
virtualenv -p python$PYTHON_VERSION $ENV_DIR
. $ENV_DIR/bin/activate
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="<https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/2022703441>"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi
basename "$VIRTUAL_ENV"

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR="test-pipeline-${RUNNER}-$(date +%Y%m%d-%H%M%S).jar"
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_options "docker_container_image=$PYTHON_CONTAINER_IMAGE" \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:root:Waiting for grpc channel to be ready at localhost:51625.
WARNING:root:Waiting for grpc channel to be ready at localhost:51625.
WARNING:root:Waiting for grpc channel to be ready at localhost:51625.
WARNING:root:Waiting for grpc channel to be ready at localhost:51625.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi
Exception in thread "main" java.lang.RuntimeException: Job BeamApp-jenkins-0422061929-eb807499_324e480e-a81c-4c49-b0d9-d2410f60f99e failed.
	at org.apache.beam.runners.spark.SparkPipelineRunner.main(SparkPipelineRunner.java:270)
Caused by: java.lang.NullPointerException
	at org.apache.beam.runners.spark.SparkPipelineRunner.run(SparkPipelineRunner.java:120)
	at org.apache.beam.runners.spark.SparkPipelineRunner.main(SparkPipelineRunner.java:268)

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE

> Task :sdks:python:test-suites:portable:py37:testJavaJarCreatorSparkRunner FAILED

FAILURE: Build failed with an exception.

* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/test-suites/portable/common.gradle>' line: 211

* What went wrong:
Execution failed for task ':sdks:python:test-suites:portable:py37:testJavaJarCreatorSparkRunner'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 20m 25s
89 actionable tasks: 63 executed, 24 from cache, 2 up-to-date

Publishing build scan...
https://gradle.com/s/n4jorxxfhukbk

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
