Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2022/03/11 06:40:23 UTC

Build failed in Jenkins: beam_PostCommit_XVR_Spark #3457

See <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/3457/display/redirect?page=changes>

Changes:

[noreply] Merge pull request #17056 from [BEAM-14076] [SnowflakeIO] Add support


------------------------------------------
[...truncated 282.87 KB...]
  >
  coders: <
    key: "qfbIhOmVTIKvCoder2"
    value: <
      spec: <
        urn: "beam:coder:kv:v1"
      >
      component_coder_ids: "qfbIhOmVTIVarIntCoder"
      component_coder_ids: "qfbIhOmVTISerializableCoder1"
    >
  >
  coders: <
    key: "qfbIhOmVTIKvCoder3"
    value: <
      spec: <
        urn: "beam:coder:kv:v1"
      >
      component_coder_ids: "qfbIhOmVTIVarIntCoder"
      component_coder_ids: "qfbIhOmVTITimestampedValueCoder"
    >
  >
  coders: <
    key: "qfbIhOmVTIKvCoder4"
    value: <
      spec: <
        urn: "beam:coder:kv:v1"
      >
      component_coder_ids: "qfbIhOmVTIVarIntCoder"
      component_coder_ids: "qfbIhOmVTIIterableCoder"
    >
  >
  coders: <
    key: "qfbIhOmVTISerializableCoder"
    value: <
      spec: <
        urn: "beam:coders:javasdk:0.1"
        payload: "\202SNAPPY\000\000\000\000\001\000\000\000\001\000\000\000\312\241\002\360^\254\355\000\005sr\000,org.apache.beam.sdk.coders.SerializableCoder\010\364 \216\305ynl\002\000\001L\000\004typet\000\021Ljava/lang/Class;xr\000&ofV\000\024Custom\005P8j\260\010\235\013;\035\013\002\000\000xr\000 j\213\000\005/\034C\335\325\211\256\274~\370\001/\020pvr\000$N1\000\\io.BoundedSource!`v\023\177\370QA\0015\010r\000\035Z3\000\t,0\020\213\372\020Y\214\347\323\002\000\000xp"
      >
    >
  >
  coders: <
    key: "qfbIhOmVTISerializableCoder1"
    value: <
      spec: <
        urn: "beam:coders:javasdk:0.1"
        payload: "\202SNAPPY\000\000\000\000\001\000\000\000\001\000\000\000\312\214\002\360^\254\355\000\005sr\000,org.apache.beam.sdk.coders.SerializableCoder\010\364 \216\305ynl\002\000\001L\000\004typet\000\021Ljava/lang/Class;xr\000&ofV\000\024Custom\005P8j\260\010\235\013;\035\013\002\000\000xr\000 j\213\000\005/\034C\335\325\211\256\274~\370\001/\020pvr\000;N1\000Pio.BoundedReadFromUnb\t\021`Source$Shardd\236\240U\205\000j\363\002\000\000xp"
      >
    >
  >
  coders: <
    key: "qfbIhOmVTITimestampedValueCoder"
    value: <
      spec: <
        urn: "beam:coders:javasdk:0.1"
        payload: "\202SNAPPY\000\000\000\000\001\000\000\000\001\000\000\001?\315\003\270\254\355\000\005sr\000Aorg.apache.beam.sdk.values.TimestampedV\001\022\000$>\021\000LCoder\364\376\202p\274iX-\002\000\001L\000\nv\001)\005\030\034t\000\"Lorg/\t]\000/\001],/sdk/coders/\005$\020;xr\000*N\202\000\t%(.Structured\005/8s\277\022\016\325\3246\021\002\000\000xr\000 j9\000\005/\034C\335\325\211\256\274~\370\001/\020psr\000,j1\000(Serializabl\t\277\034\010\364 \216\305ynl\005\327\\\004typet\000\021Ljava/lang/Class\001\300\000&jV\000\024Custom\005\215Pj\260\010\235\013;\035\013\002\000\000xq\000~\000\003vr\000;N;\000Pio.BoundedReadFromUnb\t\021`Source$Shardd\236\240U\205\000j\363\002\000\000xp"
      >
    >
  >
  coders: <
    key: "qfbIhOmVTIValueWithRecordIdCoder"
    value: <
      spec: <
        urn: "beam:coders:javasdk:0.1"
        payload: "\202SNAPPY\000\000\000\000\001\000\000\000\001\000\000\001h\364\004\320\254\355\000\005sr\000Corg.apache.beam.sdk.values.ValueWithRecordId$B\022\000LCoder{\343\376bQ\014\204>\002\000\002L\000\007i\t\025\034t\000+Lorg/\t\\\000/\001\\\024/sdk/c\0012(s/ByteArray\005B\014;L\000\n\005u\005\016\010t\000\"n;\000\005$\020;xr\000*N\274\000\t`(.Structured\005/8s\277\022\016\325\3246\021\002\000\000xr\000 j9\000\005/\034C\335\325\211\256\274~\370\001/\020psr\000)j1\0006\312\000\034\351\277\005|\263\3746\333\001:\010r\000&j8\000\024Atomic\005o\034\307\354\265\314\205tPF\0015 q\000~\000\003sr\000-N;\000$io.kafka.K\001\006\000R%\227\005B<I\320\251Y\032\341{#\002\000\001L\000\007kv\005\025\010t\000$nH\001\000K\t&\000;\025s\000\"Ns\000-P\r- j\000\275\271\035\243\3126\002!\353\010\010ke)\277\001\264\000\0022\303\001\005\022\rY$q\000~\000\006q\000~\000\016"
      >
    >
  >
  coders: <
    key: "qfbIhOmVTIVarIntCoder"
    value: <
      spec: <
        urn: "beam:coders:javasdk:0.1"
        payload: "\202SNAPPY\000\000\000\000\001\000\000\000\001\000\000\000\210\330\001\350\254\355\000\005sr\000&org.apache.beam.sdk.coders.VarIntCoder\300K2\332\251KVh\002\000\000xrr5\000\024Atomic\0055 \307\354\265\314\205tPF\002\0055\000*jj\000$Structured\0059\034s\277\022\016\325\3246\021\t9\000 j9\000\005/0C\335\325\211\256\274~\370\002\000\000xp"
      >
    >
  >
  coders: <
    key: "qfbIhOmVTIVoidCoder"
    value: <
      spec: <
        urn: "beam:coders:javasdk:0.1"
        payload: "\202SNAPPY\000\000\000\000\001\000\000\000\001\000\000\000\210\326\001\350\254\355\000\005sr\000$org.apache.beam.sdk.coders.VoidCoder\271\277U\233\350\r\257U\002\000\000xr\000&j3\000\024Atomic\0055 \307\354\265\314\205tPF\002\0055\000*j5\000$Structured\0059\034s\277\022\016\325\3246\021\t9\000 j9\000\005/0C\335\325\211\256\274~\370\002\000\000xp"
      >
    >
  >
  environments: <
    key: "go"
    value: <
      urn: "beam:env:docker:v1"
      payload: "\n\026apache/beam_go_sdk:dev"
      capabilities: "beam:protocol:progress_reporting:v0"
      capabilities: "beam:protocol:multi_core_bundle_processing:v1"
      capabilities: "beam:version:sdk_base:go"
      capabilities: "beam:coder:bytes:v1"
      capabilities: "beam:coder:bool:v1"
      capabilities: "beam:coder:varint:v1"
      capabilities: "beam:coder:double:v1"
      capabilities: "beam:coder:string_utf8:v1"
      capabilities: "beam:coder:length_prefix:v1"
      capabilities: "beam:coder:kv:v1"
      capabilities: "beam:coder:iterable:v1"
      capabilities: "beam:coder:state_backed_iterable:v1"
      capabilities: "beam:coder:windowed_value:v1"
      capabilities: "beam:coder:global_window:v1"
      capabilities: "beam:coder:interval_window:v1"
      capabilities: "beam:coder:row:v1"
      dependencies: <
        type_urn: "beam:artifact:type:file:v1"
        role_urn: "beam:artifact:role:go_worker_binary:v1"
      >
    >
  >
  environments: <
    key: "qfbIhOmVTIbeam:env:docker:v1"
    value: <
      urn: "beam:env:docker:v1"
      payload: "\n apache/beam_java8_sdk:2.38.0.dev"
      capabilities: "beam:coder:bytes:v1"
      capabilities: "beam:coder:bool:v1"
      capabilities: "beam:coder:varint:v1"
      capabilities: "beam:coder:string_utf8:v1"
      capabilities: "beam:coder:iterable:v1"
      capabilities: "beam:coder:timer:v1"
      capabilities: "beam:coder:kv:v1"
      capabilities: "beam:coder:length_prefix:v1"
      capabilities: "beam:coder:global_window:v1"
      capabilities: "beam:coder:interval_window:v1"
      capabilities: "beam:coder:custom_window:v1"
      capabilities: "beam:coder:windowed_value:v1"
      capabilities: "beam:coder:double:v1"
      capabilities: "beam:coder:row:v1"
      capabilities: "beam:coder:param_windowed_value:v1"
      capabilities: "beam:coder:state_backed_iterable:v1"
      capabilities: "beam:coder:sharded_key:v1"
      capabilities: "beam:protocol:multi_core_bundle_processing:v1"
      capabilities: "beam:protocol:progress_reporting:v1"
      capabilities: "beam:protocol:harness_monitoring_infos:v1"
      capabilities: "beam:protocol:control_request_elements_embedding:v1"
      capabilities: "beam:protocol:state_caching:v1"
      capabilities: "beam:version:sdk_base:apache/beam_java8_sdk:2.38.0.dev"
      capabilities: "beam:transform:sdf_truncate_sized_restrictions:v1"
      capabilities: "beam:transform:to_string:v1"
      dependencies: <
        type_urn: "beam:artifact:type:file:v1"
        type_payload: "\nL/tmp/artifacts/icedtea-sound-ebvtNFkfFXg4aaYFuDnwKpwDSjzsaZqlqv5iKxPTr-U.jar\022@79bbed34591f15783869a605b839f02a9c034a3cec699aa5aafe622b13d3afe5"
        role_urn: "beam:artifact:role:staging_to:v1"
        role_payload: "\n=icedtea-sound-ebvtNFkfFXg4aaYFuDnwKpwDSjzsaZqlqv5iKxPTr-U.jar"
      >
      dependencies: <
        type_urn: "beam:artifact:type:file:v1"
        type_payload: "\nF/tmp/artifacts/jaccess-ULFTCPsb6cLYZ0f1BG1FQfczmHNaZCx8plXuRDKpBqE.jar\022@50b15308fb1be9c2d86747f5046d4541f73398735a642c7ca655ee4432a906a1"
        role_urn: "beam:artifact:role:staging_to:v1"
        role_payload: "\n7jaccess-ULFTCPsb6cLYZ0f1BG1FQfczmHNaZCx8plXuRDKpBqE.jar"
      >
      dependencies: <
        type_urn: "beam:artifact:type:file:v1"
        type_payload: "\nI/tmp/artifacts/localedata-dUHqyGxaTVCjfTI8MckPYarZ3_mwf62udkxaHi1aKns.jar\022@7541eac86c5a4d50a37d323c31c90f61aad9dff9b07fadae764c5a1e2d5a2a7b"
        role_urn: "beam:artifact:role:staging_to:v1"
        role_payload: "\n:localedata-dUHqyGxaTVCjfTI8MckPYarZ3_mwf62udkxaHi1aKns.jar"
      >
      dependencies: <
        type_urn: "beam:artifact:type:file:v1"
        type_payload: "\nF/tmp/artifacts/nashorn-XdUndQGroXOP9NCsfITpBERYcbbGXVHLjbvNWXCh-3A.jar\022@5dd5277501aba1738ff4d0ac7c84e904445871b6c65d51cb8dbbcd5970a1fb70"
        role_urn: "beam:artifact:role:staging_to:v1"
        role_payload: "\n7nashorn-XdUndQGroXOP9NCsfITpBERYcbbGXVHLjbvNWXCh-3A.jar"
      >
      dependencies: <
        type_urn: "beam:artifact:type:file:v1"
        type_payload: "\nG/tmp/artifacts/cldrdata-YqzuKX1QnLCOo0cwjKRdBhGrip_ltIJZg-APT60tUPA.jar\022@62acee297d509cb08ea347308ca45d0611ab8a9fe5b4825983e00f4fad2d50f0"
        role_urn: "beam:artifact:role:staging_to:v1"
        role_payload: "\n8cldrdata-YqzuKX1QnLCOo0cwjKRdBhGrip_ltIJZg-APT60tUPA.jar"
      >
      dependencies: <
        type_urn: "beam:artifact:type:file:v1"
        type_payload: "\nD/tmp/artifacts/dnsns-dhEp186udEF6X6chZus-RJzWRmzlccxx1_btlXWayVI.jar\022@761129d7ceae74417a5fa72166eb3e449cd6466ce571cc71d7f6ed95759ac952"
        role_urn: "beam:artifact:role:staging_to:v1"
        role_payload: "\n5dnsns-dhEp186udEF6X6chZus-RJzWRmzlccxx1_btlXWayVI.jar"
      >
      dependencies: <
        type_urn: "beam:artifact:type:file:v1"
        type_payload: "\nr/tmp/artifacts/beam-sdks-java-io-expansion-service-2.38.0-SNAPSHOT-I-k8fkgIjXjAxrvbmFFFPCoSdpDtdruWL9MstxHtNoI.jar\022@23e93c7e48088d78c0c6bbdb9851453c2a127690ed76bb962fd32cb711ed3682"
        role_urn: "beam:artifact:role:staging_to:v1"
        role_payload: "\ncbeam-sdks-java-io-expansion-service-2.38.0-SNAPSHOT-I-k8fkgIjXjAxrvbmFFFPCoSdpDtdruWL9MstxHtNoI.jar"
      >
    >
  >
>
root_transform_ids: "e4"
root_transform_ids: "e5"
root_transform_ids: "s1"
root_transform_ids: "s3"
requirements: "beam:requirement:pardo:splittable_dofn:v1"
2022/03/11 06:17:47 Cross-compiling <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/sdks/go/test/integration/io/xlang/kafka/kafka_test.go> as /tmp/worker-2-1646979467441496123
2022/03/11 06:17:49 Prepared job with id: go-testkafkaio_basicreadwrite_e74393a8-77c7-4d3a-a622-96d2318338c7 and staging token: go-testkafkaio_basicreadwrite_e74393a8-77c7-4d3a-a622-96d2318338c7
2022/03/11 06:17:49 Staged binary artifact with token: 
2022/03/11 06:17:49 Submitted job: go0testkafkaio0basicreadwrite-jenkins-0311061749-adc0b769_0c9724d8-b3d6-4415-8443-8102a1ed60f8
2022/03/11 06:17:49 Job state: STOPPED
2022/03/11 06:17:49 Job state: STARTING
2022/03/11 06:17:49 Job state: RUNNING
2022/03/11 06:18:54 Job state: DONE
2022/03/11 06:18:54 Warning: 6 errors during metrics processing: [failed to deduce Step from MonitoringInfo: urn:"beam:metric:element_count:v1"  type:"beam:metrics:sum_int64:v1"  payload:"\x01"  labels:{key:"PCOLLECTION"  value:"qfbIhOmVTIExternal/KafkaIO.Read/KafkaIO.Read.ReadFromKafkaViaUnbounded/Read(KafkaUnboundedSource)/Create/Read(CreateSource)/ParDo(OutputSingleSource)/ParMultiDo(OutputSingleSource).output/SplitAndSize0"} failed to deduce Step from MonitoringInfo: urn:"beam:metric:sampled_byte_size:v1"  type:"beam:metrics:distribution_int64:v1"  payload:"\x01\xc9A\xc9A\xc9A"  labels:{key:"PCOLLECTION"  value:"qfbIhOmVTIExternal/KafkaIO.Read/KafkaIO.Read.ReadFromKafkaViaUnbounded/Read(KafkaUnboundedSource)/Create/Read(CreateSource)/ParDo(OutputSingleSource)/ParMultiDo(OutputSingleSource).output/SplitAndSize0"} failed to deduce Step from MonitoringInfo: urn:"beam:metric:element_count:v1"  type:"beam:metrics:sum_int64:v1"  payload:"\x01"  labels:{key:"PCOLLECTION"  value:"qfbIhOmVTIExternal/KafkaIO.Read/KafkaIO.Read.ReadFromKafkaViaUnbounded/Read(KafkaUnboundedSource)/Create/Read(CreateSource)/ParDo(OutputSingleSource)/ParMultiDo(OutputSingleSource).output/SplitAndSize0"} failed to deduce Step from MonitoringInfo: urn:"beam:metric:element_count:v1"  type:"beam:metrics:sum_int64:v1"  payload:"\x01"  labels:{key:"PCOLLECTION"  value:"qfbIhOmVTIExternal/KafkaIO.Read/KafkaIO.Read.ReadFromKafkaViaUnbounded/Read(KafkaUnboundedSource)/Create/Read(CreateSource)/ParDo(OutputSingleSource)/ParMultiDo(OutputSingleSource).output/PairWithRestriction0"} failed to deduce Step from MonitoringInfo: urn:"beam:metric:sampled_byte_size:v1"  type:"beam:metrics:distribution_int64:v1"  payload:"\x01\xc9A\xc9A\xc9A"  labels:{key:"PCOLLECTION"  value:"qfbIhOmVTIExternal/KafkaIO.Read/KafkaIO.Read.ReadFromKafkaViaUnbounded/Read(KafkaUnboundedSource)/Create/Read(CreateSource)/ParDo(OutputSingleSource)/ParMultiDo(OutputSingleSource).output/SplitAndSize0"} failed to deduce Step from MonitoringInfo: urn:"beam:metric:sampled_byte_size:v1"  type:"beam:metrics:distribution_int64:v1"  payload:"\x01\xbcA\xbcA\xbcA"  labels:{key:"PCOLLECTION"  value:"qfbIhOmVTIExternal/KafkaIO.Read/KafkaIO.Read.ReadFromKafkaViaUnbounded/Read(KafkaUnboundedSource)/Create/Read(CreateSource)/ParDo(OutputSingleSource)/ParMultiDo(OutputSingleSource).output/PairWithRestriction0"}]
--- PASS: TestKafkaIO_BasicReadWrite (91.48s)
PASS
ok  	github.com/apache/beam/sdks/v2/go/test/integration/io/xlang/kafka	94.537s
FAIL

> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerGoUsingJava FAILED
> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerJavaUsingJava
> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerJavaUsingPython
> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerJavaUsingPythonOnly
> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingJava
> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingPython
> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingSql
> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerCleanup
> Task :runners:spark:2:job-server:sparkJobServerCleanup

FAILURE: Build failed with an exception.

* Where:
Build file '<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/sdks/go/test/build.gradle'> line: 176

* What went wrong:
Execution failed for task ':runners:spark:2:job-server:validatesCrossLanguageRunnerGoUsingJava'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.

* Get more help at https://help.gradle.org
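
To reproduce this locally, the failing task can be run on its own with the flags suggested above. A minimal sketch, assuming a Beam source checkout with the Gradle wrapper in the repository root:

    ./gradlew :runners:spark:2:job-server:validatesCrossLanguageRunnerGoUsingJava --stacktrace --info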

Deprecated Gradle features were used in this build, making it incompatible with Gradle 8.0.

You can use '--warning-mode all' to show the individual deprecation warnings and determine if they come from your own scripts or plugins.

See https://docs.gradle.org/7.3.2/userguide/command_line_interface.html#sec:command_line_warnings

Execution optimizations have been disabled for 1 invalid unit(s) of work during this build to ensure correctness.
Please consult deprecation warnings for more details.

BUILD FAILED in 36m 35s
250 actionable tasks: 166 executed, 73 from cache, 11 up-to-date

Publishing build scan...
https://gradle.com/s/562ebnv6zleoo

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure


Jenkins build is back to normal : beam_PostCommit_XVR_Spark #3459

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/3459/display/redirect?page=changes>



Build failed in Jenkins: beam_PostCommit_XVR_Spark #3458

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/3458/display/redirect>

Changes:


------------------------------------------
[...truncated 871.02 KB...]
                        remaining = until - time.time()
                        if remaining < 0:
>                           raise grpc.FutureTimeoutError()
E                           grpc.FutureTimeoutError

../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_utilities.py:85: FutureTimeoutError
----------------------------- Captured stderr call -----------------------------
E0311 13:01:27.165192431 1922925 fork_posix.cc:70]           Fork support is only compatible with the epoll1 and poll polling strategies
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
------------------------------ Captured log call -------------------------------
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
______________________ SqlTransformTest.test_tagged_join _______________________

self = <apache_beam.transforms.sql_test.SqlTransformTest testMethod=test_tagged_join>

    def test_tagged_join(self):
      with TestPipeline() as p:
        enrich = (
            p | "Create enrich" >> beam.Create(
                [Enrich(1, "a"), Enrich(2, "b"), Enrich(26, "z")]))
        simple = (
            p | "Create simple" >> beam.Create([
                SimpleRow(1, "foo", 3.14),
                SimpleRow(26, "bar", 1.11),
                SimpleRow(1, "baz", 2.34)
            ]))
        out = ({
            'simple': simple, 'enrich': enrich
        }
               | SqlTransform(
                   """
                SELECT
                  simple.`id` AS `id`,
                  enrich.metadata AS metadata
                FROM simple
                JOIN enrich
                ON simple.`id` = enrich.`id`"""))
>       assert_that(out, equal_to([(1, "a"), (26, "z"), (1, "a")]))

apache_beam/transforms/sql_test.py:141: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:596: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:114: in run
    False if self.not_use_test_runner_api else test_runner_api))
apache_beam/pipeline.py:573: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/portability/portable_runner.py:438: in run_pipeline
    job_service_handle = self.create_job_service(options)
apache_beam/runners/portability/portable_runner.py:317: in create_job_service
    return self.create_job_service_handle(server.start(), options)
apache_beam/runners/portability/job_server.py:54: in start
    grpc.channel_ready_future(channel).result(timeout=self._timeout)
../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_utilities.py:139: in result
    self._block(timeout)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <grpc._utilities._ChannelReadyFuture object at 0x7f5133bb2160>
timeout = 60

    def _block(self, timeout):
        until = None if timeout is None else time.time() + timeout
        with self._condition:
            while True:
                if self._cancelled:
                    raise grpc.FutureCancelledError()
                elif self._matured:
                    return
                else:
                    if until is None:
                        self._condition.wait()
                    else:
                        remaining = until - time.time()
                        if remaining < 0:
>                           raise grpc.FutureTimeoutError()
E                           grpc.FutureTimeoutError

../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_utilities.py:85: FutureTimeoutError
----------------------------- Captured stderr call -----------------------------
E0311 13:02:40.201579233 1922925 fork_posix.cc:70]           Fork support is only compatible with the epoll1 and poll polling strategies
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
------------------------------ Captured log call -------------------------------
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
__________________ SqlTransformTest.test_windowing_before_sql __________________

self = <apache_beam.transforms.sql_test.SqlTransformTest testMethod=test_windowing_before_sql>

    def test_windowing_before_sql(self):
      with TestPipeline() as p:
        out = (
            p | beam.Create([
                SimpleRow(5, "foo", 1.),
                SimpleRow(15, "bar", 2.),
                SimpleRow(25, "baz", 3.)
            ])
            | beam.Map(lambda v: beam.window.TimestampedValue(v, v.id)).
            with_output_types(SimpleRow)
            | beam.WindowInto(
                beam.window.FixedWindows(10)).with_output_types(SimpleRow)
            | SqlTransform("SELECT COUNT(*) as `count` FROM PCOLLECTION"))
>       assert_that(out, equal_to([(1, ), (1, ), (1, )]))

apache_beam/transforms/sql_test.py:175: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:596: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:114: in run
    False if self.not_use_test_runner_api else test_runner_api))
apache_beam/pipeline.py:573: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/portability/portable_runner.py:438: in run_pipeline
    job_service_handle = self.create_job_service(options)
apache_beam/runners/portability/portable_runner.py:317: in create_job_service
    return self.create_job_service_handle(server.start(), options)
apache_beam/runners/portability/job_server.py:54: in start
    grpc.channel_ready_future(channel).result(timeout=self._timeout)
../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_utilities.py:139: in result
    self._block(timeout)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <grpc._utilities._ChannelReadyFuture object at 0x7f5133b92c18>
timeout = 60

    def _block(self, timeout):
        until = None if timeout is None else time.time() + timeout
        with self._condition:
            while True:
                if self._cancelled:
                    raise grpc.FutureCancelledError()
                elif self._matured:
                    return
                else:
                    if until is None:
                        self._condition.wait()
                    else:
                        remaining = until - time.time()
                        if remaining < 0:
>                           raise grpc.FutureTimeoutError()
E                           grpc.FutureTimeoutError

../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_utilities.py:85: FutureTimeoutError
----------------------------- Captured stderr call -----------------------------
E0311 13:03:52.865939596 1922925 fork_posix.cc:70]           Fork support is only compatible with the epoll1 and poll polling strategies
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
------------------------------ Captured log call -------------------------------
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
_________________ SqlTransformTest.test_zetasql_generate_data __________________

self = <apache_beam.transforms.sql_test.SqlTransformTest testMethod=test_zetasql_generate_data>

    def test_zetasql_generate_data(self):
      with TestPipeline() as p:
        out = p | SqlTransform(
            """SELECT
              CAST(1 AS INT64) AS `int`,
              CAST('foo' AS STRING) AS `str`,
              CAST(3.14  AS FLOAT64) AS `flt`""",
            dialect="zetasql")
>       assert_that(out, equal_to([(1, "foo", 3.14)]))

apache_beam/transforms/sql_test.py:160: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:596: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:114: in run
    False if self.not_use_test_runner_api else test_runner_api))
apache_beam/pipeline.py:573: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/portability/portable_runner.py:438: in run_pipeline
    job_service_handle = self.create_job_service(options)
apache_beam/runners/portability/portable_runner.py:317: in create_job_service
    return self.create_job_service_handle(server.start(), options)
apache_beam/runners/portability/job_server.py:54: in start
    grpc.channel_ready_future(channel).result(timeout=self._timeout)
../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_utilities.py:139: in result
    self._block(timeout)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <grpc._utilities._ChannelReadyFuture object at 0x7f5133b5b9b0>
timeout = 60

    def _block(self, timeout):
        until = None if timeout is None else time.time() + timeout
        with self._condition:
            while True:
                if self._cancelled:
                    raise grpc.FutureCancelledError()
                elif self._matured:
                    return
                else:
                    if until is None:
                        self._condition.wait()
                    else:
                        remaining = until - time.time()
                        if remaining < 0:
>                           raise grpc.FutureTimeoutError()
E                           grpc.FutureTimeoutError

../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_utilities.py:85: FutureTimeoutError
----------------------------- Captured stderr call -----------------------------
E0311 13:05:05.176886280 1922925 fork_posix.cc:70]           Fork support is only compatible with the epoll1 and poll polling strategies
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
------------------------------ Captured log call -------------------------------
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
- generated xml file: <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/sdks/python/pytest_xlangSqlValidateRunner.xml> -
================= 9 failed, 5241 deselected in 836.55 seconds ==================
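
All nine SQL failures above show the same symptom: the grpc.channel_ready_future(...).result(timeout=60) call in apache_beam/runners/portability/job_server.py times out before the Spark job server endpoint becomes ready. The same readiness wait can be exercised in isolation; this is a minimal sketch, assuming the job server is expected on localhost:8099 (the port and helper name are illustrative, not taken from this log):

    # check_job_endpoint.py - hypothetical helper, not part of the Beam test suite.
    # Repeats the readiness wait that raised grpc.FutureTimeoutError in the tracebacks above.
    import grpc

    def wait_for_job_endpoint(endpoint="localhost:8099", timeout=60):
        channel = grpc.insecure_channel(endpoint)
        try:
            # Same call that failed in apache_beam/runners/portability/job_server.py:54.
            grpc.channel_ready_future(channel).result(timeout=timeout)
            print("job endpoint %s is ready" % endpoint)
        except grpc.FutureTimeoutError:
            print("timed out after %ss waiting for %s" % (timeout, endpoint))
        finally:
            channel.close()

    if __name__ == "__main__":
        wait_for_job_endpoint()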

> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingSql FAILED
> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerCleanup
> Task :runners:spark:2:job-server:sparkJobServerCleanup

FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingSql'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.

* Get more help at https://help.gradle.org
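
As with the Go failure in #3457, the failing task can be re-run in isolation with the suggested flags (again a sketch, assuming a Beam checkout with the Gradle wrapper):

    ./gradlew :runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingSql --stacktrace --info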

Deprecated Gradle features were used in this build, making it incompatible with Gradle 8.0.

You can use '--warning-mode all' to show the individual deprecation warnings and determine if they come from your own scripts or plugins.

See https://docs.gradle.org/7.3.2/userguide/command_line_interface.html#sec:command_line_warnings

Execution optimizations have been disabled for 1 invalid unit(s) of work during this build to ensure correctness.
Please consult deprecation warnings for more details.

BUILD FAILED in 55m 29s
250 actionable tasks: 172 executed, 67 from cache, 11 up-to-date

Publishing build scan...
https://gradle.com/s/h7gveudt6v3zu

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org