Posted to commits@beam.apache.org by pa...@apache.org on 2021/08/04 21:15:47 UTC

[beam] branch master updated: [BEAM-12670] Relocate bq client exception imports to try block and conditionally turn off tests if imports fail

This is an automated email from the ASF dual-hosted git repository.

pabloem pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new 2c7911b  [BEAM-12670] Relocate bq client exception imports to try block and conditionally turn off tests if imports fail
     new 39cf3fc  Merge pull request #15264 from [BEAM-12670] Relocate bq client exception imports to try block and conditionally turn off tests if imports fail
2c7911b is described below

commit 2c7911b3268fff1aa1fe9329495d90052e0716a0
Author: Alex Amato <aj...@google.com>
AuthorDate: Mon Aug 2 17:53:47 2021 -0700

    [BEAM-12670] Relocate bq client exception imports to try block and conditionally turn off tests if imports fail
---
 sdks/python/apache_beam/io/gcp/bigquery_tools.py      |  3 +--
 sdks/python/apache_beam/io/gcp/bigquery_tools_test.py | 11 ++++++++---
 2 files changed, 9 insertions(+), 5 deletions(-)

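For context, the change applies the usual Beam pattern for optional GCP dependencies: import the extra packages inside a module-level try block, fall back to None sentinels on ImportError, and let dependent code and tests check those sentinels. A minimal sketch of that import guard follows; it is illustrative only (a simplified module, not the actual Beam file), though the names it imports match the ones added in this commit:

    # Guarded imports: the module still loads when the GCP extras
    # (google-api-core, google-cloud-bigquery) are not installed.
    try:
      from google.api_core.exceptions import ClientError, GoogleAPICallError
      from google.cloud import bigquery as gcp_bigquery
    except ImportError:
      ClientError = None
      GoogleAPICallError = None
      gcp_bigquery = None
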
diff --git a/sdks/python/apache_beam/io/gcp/bigquery_tools.py b/sdks/python/apache_beam/io/gcp/bigquery_tools.py
index 114c42d..964b8f9 100644
--- a/sdks/python/apache_beam/io/gcp/bigquery_tools.py
+++ b/sdks/python/apache_beam/io/gcp/bigquery_tools.py
@@ -68,6 +68,7 @@ from apache_beam.utils.histogram import LinearBucket
 try:
   from apitools.base.py.transfer import Upload
   from apitools.base.py.exceptions import HttpError, HttpForbiddenError
+  from google.api_core.exceptions import ClientError, GoogleAPICallError
   from google.cloud import bigquery as gcp_bigquery
 except ImportError:
   gcp_bigquery = None
@@ -617,8 +618,6 @@ class BigQueryWrapper(object):
 
     Docs for this BQ call: https://cloud.google.com/bigquery/docs/reference\
       /rest/v2/tabledata/insertAll."""
-    from google.api_core.exceptions import ClientError
-    from google.api_core.exceptions import GoogleAPICallError
     # The rows argument is a list of
     # bigquery.TableDataInsertAllRequest.RowsValueListEntry instances as
     # required by the InsertAll() method.
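On the test side, the same guard is paired with unittest.skipIf so the suite degrades to skipped tests rather than import errors when the GCP packages are absent. A rough, self-contained sketch under the same assumptions is below (the class and test names are hypothetical; the sentinel check and skip reason mirror the real change in the next file's diff):

    import unittest

    try:
      from google.api_core.exceptions import DeadlineExceeded
    except ImportError:
      DeadlineExceeded = None


    class OptionalGcpDepsTest(unittest.TestCase):
      @unittest.skipIf(
          DeadlineExceeded is None, 'GCP dependencies are not installed')
      def test_uses_gcp_exception(self):
        # Only runs when google-api-core is importable; otherwise skipped.
        with self.assertRaises(DeadlineExceeded):
          raise DeadlineExceeded('Deadline Exceeded')


    if __name__ == '__main__':
      unittest.main()
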
diff --git a/sdks/python/apache_beam/io/gcp/bigquery_tools_test.py b/sdks/python/apache_beam/io/gcp/bigquery_tools_test.py
index 1252ffb..84ac2f7 100644
--- a/sdks/python/apache_beam/io/gcp/bigquery_tools_test.py
+++ b/sdks/python/apache_beam/io/gcp/bigquery_tools_test.py
@@ -54,9 +54,14 @@ from apache_beam.options.value_provider import StaticValueProvider
 # pylint: disable=wrong-import-order, wrong-import-position
 try:
   from apitools.base.py.exceptions import HttpError, HttpForbiddenError
+  from google.api_core.exceptions import ClientError, DeadlineExceeded
+  from google.api_core.exceptions import InternalServerError
 except ImportError:
+  ClientError = None
+  DeadlineExceeded = None
   HttpError = None
   HttpForbiddenError = None
+  InternalServerError = None
 # pylint: enable=wrong-import-order, wrong-import-position
 
 
@@ -459,15 +464,15 @@ class TestBigQueryWrapper(unittest.TestCase):
     self.assertTrue(
         found, "Did not find write call metric with status: %s" % status)
 
+  @unittest.skipIf(ClientError is None, 'GCP dependencies are not installed')
   def test_insert_rows_sets_metric_on_failure(self):
-    from google.api_core import exceptions
     MetricsEnvironment.process_wide_container().reset()
     client = mock.Mock()
     client.insert_rows_json = mock.Mock(
         # Fail a few times, then succeed.
         side_effect=[
-            exceptions.DeadlineExceeded("Deadline Exceeded"),
-            exceptions.InternalServerError("Internal Error"),
+            DeadlineExceeded("Deadline Exceeded"),
+            InternalServerError("Internal Error"),
             [],
         ])
     wrapper = beam.io.gcp.bigquery_tools.BigQueryWrapper(client)