Posted to commits@beam.apache.org by "ASF GitHub Bot (JIRA)" <ji...@apache.org> on 2018/01/09 18:35:01 UTC

[jira] [Commented] (BEAM-3442) Clean up usage of deprecated BaseException.message

    [ https://issues.apache.org/jira/browse/BEAM-3442?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16318886#comment-16318886 ] 

ASF GitHub Bot commented on BEAM-3442:
--------------------------------------

chamikaramj closed pull request #4366: [BEAM-3442] Cleanup DeprecationWarning for BaseException.message.
URL: https://github.com/apache/beam/pull/4366

This is a PR merged from a forked repository. Because GitHub hides the
original diff once such a pull request is merged, the diff is reproduced
below for the sake of provenance:

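Before reading the diff, it may help to see the single pattern it applies
throughout: assertions that capture an exception and then inspect the
deprecated error.exception.message attribute (via startswith or equality
checks) are replaced with assertRaisesRegexp, which matches the exception's
string form directly and never touches .message. The sketch below is a
minimal, hypothetical illustration of that before/after pattern under
Python 2 (which the SDK targeted at the time); _SampleError and _fail are
made-up names, not Beam code:

    import re
    import unittest


    class _SampleError(Exception):
      pass


    def _fail():
      raise _SampleError('Copy operation failed for 2 files')


    class MessagePatternTest(unittest.TestCase):

      def test_old_pattern(self):
        # Before: capture the exception and read the deprecated
        # BaseException.message attribute (Python 2 only; it emits a
        # DeprecationWarning since 2.6 and is gone in Python 3).
        with self.assertRaises(_SampleError) as error:
          _fail()
        self.assertTrue(
            error.exception.message.startswith('Copy operation failed'))

      def test_new_pattern(self):
        # After: assertRaisesRegexp runs re.search against str(exception),
        # so .message is never accessed. Anchor with '^' to keep the
        # original "starts with" semantics, or wrap a literal error
        # constant in re.escape() if it contains regex metacharacters.
        with self.assertRaisesRegexp(_SampleError, r'^Copy operation failed'):
          _fail()
        with self.assertRaisesRegexp(_SampleError,
                                     re.escape('failed for 2 files')):
          _fail()


    if __name__ == '__main__':
      unittest.main()

Because assertRaisesRegexp searches rather than prefix-matches, the new
assertions in the diff anchor their patterns with '^' or wrap literal error
constants such as JSON_COMPLIANCE_ERROR in re.escape(). (Under Python 3 the
same method is spelled assertRaisesRegex; assertRaisesRegexp survives only as
a deprecated alias.)
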
diff --git a/sdks/python/apache_beam/io/filesystems_test.py b/sdks/python/apache_beam/io/filesystems_test.py
index eaaa40f9f7e..d8cc9711550 100644
--- a/sdks/python/apache_beam/io/filesystems_test.py
+++ b/sdks/python/apache_beam/io/filesystems_test.py
@@ -120,10 +120,9 @@ def test_match_file_empty(self):
 
   def test_match_file_exception(self):
     # Match files with None so that it throws an exception
-    with self.assertRaises(BeamIOError) as error:
+    with self.assertRaisesRegexp(BeamIOError,
+                                 r'^Unable to get the Filesystem') as error:
       FileSystems.match([None])
-    self.assertTrue(
-        error.exception.message.startswith('Unable to get the Filesystem'))
     self.assertEqual(error.exception.exception_details.keys(), [None])
 
   def test_match_directory(self):
@@ -155,10 +154,9 @@ def test_copy(self):
   def test_copy_error(self):
     path1 = os.path.join(self.tmpdir, 'f1')
     path2 = os.path.join(self.tmpdir, 'f2')
-    with self.assertRaises(BeamIOError) as error:
+    with self.assertRaisesRegexp(BeamIOError,
+                                 r'^Copy operation failed') as error:
       FileSystems.copy([path1], [path2])
-    self.assertTrue(
-        error.exception.message.startswith('Copy operation failed'))
     self.assertEqual(error.exception.exception_details.keys(), [(path1, path2)])
 
   def test_copy_directory(self):
@@ -188,10 +186,9 @@ def test_rename(self):
   def test_rename_error(self):
     path1 = os.path.join(self.tmpdir, 'f1')
     path2 = os.path.join(self.tmpdir, 'f2')
-    with self.assertRaises(BeamIOError) as error:
+    with self.assertRaisesRegexp(BeamIOError,
+                                 r'^Rename operation failed') as error:
       FileSystems.rename([path1], [path2])
-    self.assertTrue(
-        error.exception.message.startswith('Rename operation failed'))
     self.assertEqual(error.exception.exception_details.keys(), [(path1, path2)])
 
   def test_rename_directory(self):
@@ -230,8 +227,7 @@ def test_delete(self):
 
   def test_delete_error(self):
     path1 = os.path.join(self.tmpdir, 'f1')
-    with self.assertRaises(BeamIOError) as error:
+    with self.assertRaisesRegexp(BeamIOError,
+                                 r'^Delete operation failed') as error:
       FileSystems.delete([path1])
-    self.assertTrue(
-        error.exception.message.startswith('Delete operation failed'))
     self.assertEqual(error.exception.exception_details.keys(), [path1])
diff --git a/sdks/python/apache_beam/io/gcp/bigquery_test.py b/sdks/python/apache_beam/io/gcp/bigquery_test.py
index 84904816434..ff6721e6d91 100644
--- a/sdks/python/apache_beam/io/gcp/bigquery_test.py
+++ b/sdks/python/apache_beam/io/gcp/bigquery_test.py
@@ -20,6 +20,7 @@
 import datetime
 import json
 import logging
+import re
 import time
 import unittest
 
@@ -28,6 +29,7 @@
 
 import apache_beam as beam
 from apache_beam.internal.gcp.json_value import to_json_value
+from apache_beam.io.gcp.bigquery import JSON_COMPLIANCE_ERROR
 from apache_beam.io.gcp.bigquery import RowAsDictJsonCoder
 from apache_beam.io.gcp.bigquery import TableRowJsonCoder
 from apache_beam.io.gcp.bigquery import parse_table_schema_from_json
@@ -54,11 +56,10 @@ def test_row_as_dict(self):
     self.assertEqual(test_value, coder.decode(coder.encode(test_value)))
 
   def json_compliance_exception(self, value):
-    with self.assertRaises(ValueError) as exn:
+    with self.assertRaisesRegexp(ValueError, re.escape(JSON_COMPLIANCE_ERROR)):
       coder = RowAsDictJsonCoder()
       test_value = {'s': value}
-      self.assertEqual(test_value, coder.decode(coder.encode(test_value)))
-      self.assertTrue(bigquery.JSON_COMPLIANCE_ERROR in exn.exception.message)
+      coder.decode(coder.encode(test_value))
 
   def test_invalid_json_nan(self):
     self.json_compliance_exception(float('nan'))
@@ -105,13 +106,12 @@ def test_row_and_no_schema(self):
     test_row = bigquery.TableRow(
         f=[bigquery.TableCell(v=to_json_value(e))
            for e in ['abc', 123, 123.456, True]])
-    with self.assertRaises(AttributeError) as ctx:
+    with self.assertRaisesRegexp(AttributeError,
+                                 r'^The TableRowJsonCoder requires'):
       coder.encode(test_row)
-    self.assertTrue(
-        ctx.exception.message.startswith('The TableRowJsonCoder requires'))
 
   def json_compliance_exception(self, value):
-    with self.assertRaises(ValueError) as exn:
+    with self.assertRaisesRegexp(ValueError, re.escape(JSON_COMPLIANCE_ERROR)):
       schema_definition = [('f', 'FLOAT')]
       schema = bigquery.TableSchema(
           fields=[bigquery.TableFieldSchema(name=k, type=v)
@@ -120,7 +120,6 @@ def json_compliance_exception(self, value):
       test_row = bigquery.TableRow(
           f=[bigquery.TableCell(v=to_json_value(value))])
       coder.encode(test_row)
-      self.assertTrue(bigquery.JSON_COMPLIANCE_ERROR in exn.exception.message)
 
   def test_invalid_json_nan(self):
     self.json_compliance_exception(float('nan'))
@@ -475,17 +474,16 @@ def test_read_from_query_unflatten_records(self):
     self.assertFalse(reader.flatten_results)
 
   def test_using_both_query_and_table_fails(self):
-    with self.assertRaises(ValueError) as exn:
+    with self.assertRaisesRegexp(
+        ValueError,
+        r'Both a BigQuery table and a query were specified\. Please specify '
+        r'only one of these'):
       beam.io.BigQuerySource(table='dataset.table', query='query')
-      self.assertEqual(exn.exception.message, 'Both a BigQuery table and a'
-                       ' query were specified. Please specify only one of '
-                       'these.')
 
   def test_using_neither_query_nor_table_fails(self):
-    with self.assertRaises(ValueError) as exn:
+    with self.assertRaisesRegexp(
+        ValueError, r'A BigQuery table or a query must be specified'):
       beam.io.BigQuerySource()
-      self.assertEqual(exn.exception.message, 'A BigQuery table or a query'
-                       ' must be specified')
 
   def test_read_from_table_as_tablerows(self):
     client = mock.Mock()
@@ -566,15 +564,13 @@ def test_no_table_and_create_never(self, patched_time_sleep):
     client.tables.Get.side_effect = HttpError(
         response={'status': '404'}, url='', content='')
     create_disposition = beam.io.BigQueryDisposition.CREATE_NEVER
-    with self.assertRaises(RuntimeError) as exn:
+    with self.assertRaisesRegexp(
+        RuntimeError, r'Table project:dataset\.table not found but create '
+                      r'disposition is CREATE_NEVER'):
       with beam.io.BigQuerySink(
           'project:dataset.table',
           create_disposition=create_disposition).writer(client):
         pass
-    self.assertEqual(
-        exn.exception.message,
-        'Table project:dataset.table not found but create disposition is '
-        'CREATE_NEVER.')
 
   def test_no_table_and_create_if_needed(self):
     client = mock.Mock()
@@ -601,15 +597,13 @@ def test_no_table_and_create_if_needed_and_no_schema(
     client.tables.Get.side_effect = HttpError(
         response={'status': '404'}, url='', content='')
     create_disposition = beam.io.BigQueryDisposition.CREATE_IF_NEEDED
-    with self.assertRaises(RuntimeError) as exn:
+    with self.assertRaisesRegexp(
+        RuntimeError, r'Table project:dataset\.table requires a schema\. None '
+                      r'can be inferred because the table does not exist'):
       with beam.io.BigQuerySink(
           'project:dataset.table',
           create_disposition=create_disposition).writer(client):
         pass
-    self.assertEqual(
-        exn.exception.message,
-        'Table project:dataset.table requires a schema. None can be inferred '
-        'because the table does not exist.')
 
   @mock.patch('time.sleep', return_value=None)
   def test_table_not_empty_and_write_disposition_empty(
@@ -621,15 +615,13 @@ def test_table_not_empty_and_write_disposition_empty(
         schema=bigquery.TableSchema())
     client.tabledata.List.return_value = bigquery.TableDataList(totalRows=1)
     write_disposition = beam.io.BigQueryDisposition.WRITE_EMPTY
-    with self.assertRaises(RuntimeError) as exn:
+    with self.assertRaisesRegexp(
+        RuntimeError, r'Table project:dataset\.table is not empty but write '
+                      r'disposition is WRITE_EMPTY'):
       with beam.io.BigQuerySink(
           'project:dataset.table',
           write_disposition=write_disposition).writer(client):
         pass
-    self.assertEqual(
-        exn.exception.message,
-        'Table project:dataset.table is not empty but write disposition is '
-        'WRITE_EMPTY.')
 
   def test_table_empty_and_write_disposition_empty(self):
     client = mock.Mock()
@@ -745,7 +737,7 @@ def test_delete_dataset_retries_fail(self, patched_time_sleep):
     client = mock.Mock()
     client.datasets.Delete.side_effect = ValueError("Cannot delete")
     wrapper = beam.io.gcp.bigquery.BigQueryWrapper(client)
-    with self.assertRaises(ValueError) as _:
+    with self.assertRaises(ValueError):
       wrapper._delete_dataset('', '')
     self.assertEqual(
         beam.io.gcp.bigquery.MAX_RETRIES + 1,
@@ -765,7 +757,7 @@ def test_delete_table_retries_fail(self, patched_time_sleep):
     client = mock.Mock()
     client.tables.Delete.side_effect = ValueError("Cannot delete")
     wrapper = beam.io.gcp.bigquery.BigQueryWrapper(client)
-    with self.assertRaises(ValueError) as _:
+    with self.assertRaises(ValueError):
       wrapper._delete_table('', '', '')
     self.assertTrue(client.tables.Delete.called)
 
@@ -800,7 +792,7 @@ def test_temporary_dataset_is_unique(self, patched_time_sleep):
         datasetReference=bigquery.DatasetReference(
             projectId='project_id', datasetId='dataset_id'))
     wrapper = beam.io.gcp.bigquery.BigQueryWrapper(client)
-    with self.assertRaises(RuntimeError) as _:
+    with self.assertRaises(RuntimeError):
       wrapper.create_temporary_dataset('project_id')
     self.assertTrue(client.datasets.Get.called)
 
diff --git a/sdks/python/apache_beam/io/gcp/gcsfilesystem_test.py b/sdks/python/apache_beam/io/gcp/gcsfilesystem_test.py
index 88a601f6d0f..c174e48778e 100644
--- a/sdks/python/apache_beam/io/gcp/gcsfilesystem_test.py
+++ b/sdks/python/apache_beam/io/gcp/gcsfilesystem_test.py
@@ -125,10 +125,9 @@ def test_match_multiples_error(self, mock_gcsio):
     expected_results = {'gs://bucket/': exception}
 
     file_system = gcsfilesystem.GCSFileSystem()
-    with self.assertRaises(BeamIOError) as error:
+    with self.assertRaisesRegexp(BeamIOError,
+                                 r'^Match operation failed') as error:
       file_system.match(['gs://bucket/'])
-    self.assertTrue(
-        error.exception.message.startswith('Match operation failed'))
     self.assertEqual(error.exception.exception_details, expected_results)
     gcsio_mock.size_of_files_in_glob.assert_called_once_with(
         'gs://bucket/*', None)
@@ -207,10 +206,9 @@ def test_copy_file_error(self, mock_gcsio):
 
     # Issue batch copy.
     file_system = gcsfilesystem.GCSFileSystem()
-    with self.assertRaises(BeamIOError) as error:
+    with self.assertRaisesRegexp(BeamIOError,
+                                 r'^Copy operation failed') as error:
       file_system.copy(sources, destinations)
-    self.assertTrue(
-        error.exception.message.startswith('Copy operation failed'))
     self.assertEqual(error.exception.exception_details, expected_results)
 
     gcsio_mock.copy.assert_called_once_with(
@@ -300,10 +298,9 @@ def test_rename_error(self, mock_gcsio):
 
     # Issue batch rename.
     file_system = gcsfilesystem.GCSFileSystem()
-    with self.assertRaises(BeamIOError) as error:
+    with self.assertRaisesRegexp(BeamIOError,
+                                 r'^Rename operation failed') as error:
       file_system.rename(sources, destinations)
-    self.assertTrue(
-        error.exception.message.startswith('Rename operation failed'))
     self.assertEqual(error.exception.exception_details, expected_results)
 
     gcsio_mock.copy_batch.assert_called_once_with([
@@ -349,9 +346,8 @@ def test_delete_error(self, mock_gcsio):
 
     # Issue batch delete.
     file_system = gcsfilesystem.GCSFileSystem()
-    with self.assertRaises(BeamIOError) as error:
+    with self.assertRaisesRegexp(BeamIOError,
+                                 r'^Delete operation failed') as error:
       file_system.delete(files)
-    self.assertTrue(
-        error.exception.message.startswith('Delete operation failed'))
     self.assertEqual(error.exception.exception_details, expected_results)
     gcsio_mock.delete_batch.assert_called()
diff --git a/sdks/python/apache_beam/io/gcp/tests/utils_test.py b/sdks/python/apache_beam/io/gcp/tests/utils_test.py
index ac09e4442e7..a5c74bb3424 100644
--- a/sdks/python/apache_beam/io/gcp/tests/utils_test.py
+++ b/sdks/python/apache_beam/io/gcp/tests/utils_test.py
@@ -60,13 +60,12 @@ def test_delete_table_fails_dataset_not_exist(self, mock_client):
     mock_client.return_value.dataset = mock_dataset
     mock_dataset.return_value.exists.return_value = False
 
-    with self.assertRaises(Exception) as e:
+    with self.assertRaisesRegexp(
+        Exception, r'^Failed to cleanup. Bigquery dataset unused_dataset '
+                   r'doesn\'t exist'):
       utils.delete_bq_table('unused_project',
                             'unused_dataset',
                             'unused_table')
-    self.assertTrue(
-        e.exception.message.startswith('Failed to cleanup. Bigquery dataset '
-                                       'unused_dataset doesn\'t exist'))
 
   @patch.object(bigquery, 'Client')
   def test_delete_table_fails_table_not_exist(self, mock_client):
@@ -78,13 +77,12 @@ def test_delete_table_fails_table_not_exist(self, mock_client):
     mock_dataset.return_value.table = mock_table
     mock_table.return_value.exists.return_value = False
 
-    with self.assertRaises(Exception) as e:
+    with self.assertRaisesRegexp(Exception,
+                                 r'^Failed to cleanup. Bigquery table '
+                                 'unused_table doesn\'t exist'):
       utils.delete_bq_table('unused_project',
                             'unused_dataset',
                             'unused_table')
-    self.assertTrue(
-        e.exception.message.startswith('Failed to cleanup. Bigquery table '
-                                       'unused_table doesn\'t exist'))
 
   @patch.object(bigquery, 'Client')
   def test_delete_table_fails_service_error(self, mock_client):
@@ -96,13 +94,12 @@ def test_delete_table_fails_service_error(self, mock_client):
     mock_dataset.return_value.table = mock_table
     mock_table.return_value.exists.return_value = True
 
-    with self.assertRaises(Exception) as e:
+    with self.assertRaisesRegexp(Exception,
+                                 r'^Failed to cleanup. Bigquery table '
+                                 'unused_table still exists'):
       utils.delete_bq_table('unused_project',
                             'unused_dataset',
                             'unused_table')
-    self.assertTrue(
-        e.exception.message.startswith('Failed to cleanup. Bigquery table '
-                                       'unused_table still exists'))
 
 
 if __name__ == '__main__':
diff --git a/sdks/python/apache_beam/io/localfilesystem_test.py b/sdks/python/apache_beam/io/localfilesystem_test.py
index 3f4530f64b7..9bc1a0774c5 100644
--- a/sdks/python/apache_beam/io/localfilesystem_test.py
+++ b/sdks/python/apache_beam/io/localfilesystem_test.py
@@ -140,10 +140,9 @@ def test_match_file_empty(self):
 
   def test_match_file_exception(self):
     # Match files with None so that it throws an exception
-    with self.assertRaises(BeamIOError) as error:
+    with self.assertRaisesRegexp(BeamIOError,
+                                 r'^Match operation failed') as error:
       self.fs.match([None])
-    self.assertTrue(
-        error.exception.message.startswith('Match operation failed'))
     self.assertEqual(error.exception.exception_details.keys(), [None])
 
   def test_match_glob(self):
@@ -175,10 +174,9 @@ def test_copy(self):
   def test_copy_error(self):
     path1 = os.path.join(self.tmpdir, 'f1')
     path2 = os.path.join(self.tmpdir, 'f2')
-    with self.assertRaises(BeamIOError) as error:
+    with self.assertRaisesRegexp(BeamIOError,
+                                 r'^Copy operation failed') as error:
       self.fs.copy([path1], [path2])
-    self.assertTrue(
-        error.exception.message.startswith('Copy operation failed'))
     self.assertEqual(error.exception.exception_details.keys(), [(path1, path2)])
 
   def test_copy_directory(self):
@@ -208,10 +206,9 @@ def test_rename(self):
   def test_rename_error(self):
     path1 = os.path.join(self.tmpdir, 'f1')
     path2 = os.path.join(self.tmpdir, 'f2')
-    with self.assertRaises(BeamIOError) as error:
+    with self.assertRaisesRegexp(BeamIOError,
+                                 r'^Rename operation failed') as error:
       self.fs.rename([path1], [path2])
-    self.assertTrue(
-        error.exception.message.startswith('Rename operation failed'))
     self.assertEqual(error.exception.exception_details.keys(), [(path1, path2)])
 
   def test_rename_directory(self):
@@ -250,8 +247,7 @@ def test_delete(self):
 
   def test_delete_error(self):
     path1 = os.path.join(self.tmpdir, 'f1')
-    with self.assertRaises(BeamIOError) as error:
+    with self.assertRaisesRegexp(BeamIOError,
+                                 r'^Delete operation failed') as error:
       self.fs.delete([path1])
-    self.assertTrue(
-        error.exception.message.startswith('Delete operation failed'))
     self.assertEqual(error.exception.exception_details.keys(), [path1])
diff --git a/sdks/python/apache_beam/io/tfrecordio_test.py b/sdks/python/apache_beam/io/tfrecordio_test.py
index f7a160a1ce4..fcafb712530 100644
--- a/sdks/python/apache_beam/io/tfrecordio_test.py
+++ b/sdks/python/apache_beam/io/tfrecordio_test.py
@@ -23,6 +23,7 @@
 import os
 import pickle
 import random
+import re
 import shutil
 import tempfile
 import unittest
@@ -79,9 +80,8 @@ def _increment_value_at_index(self, value, index):
     return ''.join(l)
 
   def _test_error(self, record, error_text):
-    with self.assertRaises(ValueError) as context:
+    with self.assertRaisesRegexp(ValueError, re.escape(error_text)):
       _TFRecordUtil.read_record(self._as_file_handle(record))
-    self.assertIn(error_text, context.exception.message)
 
   def test_masked_crc32c(self):
     self.assertEqual(0xfd7fffa, _TFRecordUtil._masked_crc32c('\x00' * 32))

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


> Clean up usage of deprecated BaseException.message
> --------------------------------------------------
>
>                 Key: BEAM-3442
>                 URL: https://issues.apache.org/jira/browse/BEAM-3442
>             Project: Beam
>          Issue Type: Bug
>          Components: sdk-py-core
>            Reporter: Udi Meiri
>            Assignee: Udi Meiri
>            Priority: Minor
>
> I believe this warning only appears in unit tests.
> sdks/python/apache_beam/io/gcp/bigquery_test.py:610: DeprecationWarning: BaseException.message has been deprecated as of Python 2.6
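
For context on the warning quoted above: BaseException.message was deprecated
in Python 2.6 and removed outright in Python 3, so any access to it has to go;
str(exc) or exc.args yields the same text portably. A small hypothetical
snippet (the table name is just an illustrative value echoing the messages in
the diff):

    try:
      raise ValueError('Table project:dataset.table not found')
    except ValueError as exc:
      # Portable on both Python 2 and Python 3:
      assert str(exc) == 'Table project:dataset.table not found'
      assert exc.args[0] == 'Table project:dataset.table not found'
      # Deprecated (Python 2 only, gone in Python 3) -- reading this attribute
      # is what emitted the DeprecationWarning quoted above:
      #   exc.message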



--
This message was sent by Atlassian JIRA
(v6.4.14#64029)