Posted to commits@airflow.apache.org by po...@apache.org on 2020/11/16 14:27:35 UTC

[airflow] 01/01: Fixes bad mocking of UUID and removes the upper limit on sentry

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 8bdd4425b33641adffd5ab8bb57cb1f3baf68df7
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Mon Nov 16 14:09:28 2020 +0100

    Fixes bad mocking of UUID and removes the upper limit on sentry
    
    We've been mocking UUID badly, and this caused tests with the
    new version of sentry to fail. This change fixes both problems: it
    corrects the mocking and removes the upper constraint on sentry.
---
 setup.py                                          |  4 +---
 tests/contrib/operators/test_dataproc_operator.py | 23 +++++++++++++----------
 tests/models/test_kubernetes.py                   |  7 +++++--
 3 files changed, 19 insertions(+), 15 deletions(-)
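
For context: mocking uuid.uuid4 to return a plain string only works as long as
the code under test merely slices str(uuid4()); anything that uses the result
as a real UUID object breaks. sentry-sdk, for instance, generates event ids
via uuid.uuid4().hex, so a string-valued mock raises AttributeError. A minimal
sketch of the failure mode (event_id below is an illustrative stand-in, not
sentry code):

    import uuid
    from unittest import mock
    from uuid import UUID

    MOCK_UUID = UUID('cf4a56d2-8101-4217-b027-2af6216feb48')

    def event_id():
        # stand-in for library code that needs a real UUID object
        return uuid.uuid4().hex

    # Bad mock: a plain string has no .hex attribute
    with mock.patch('uuid.uuid4', return_value='test'):
        try:
            event_id()
        except AttributeError as exc:
            print(exc)  # 'str' object has no attribute 'hex'

    # Fixed mock: a real UUID keeps every consumer working
    with mock.patch('uuid.uuid4', return_value=MOCK_UUID):
        print(event_id())  # cf4a56d281014217b0272af6216feb48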

diff --git a/setup.py b/setup.py
index 0f1ace5..64bedb7 100644
--- a/setup.py
+++ b/setup.py
@@ -373,10 +373,8 @@ sendgrid = [
     'sendgrid>=5.2.0,<6',
 ]
 sentry = [
-    # Sentry SDK 0.19.0 introduces a breaking change
-    # https://github.com/getsentry/sentry-python/issues/920#issuecomment-727658274
     'blinker>=1.1',
-    'sentry-sdk>=0.8.0, <0.19.0',
+    'sentry-sdk>=0.8.0',
 ]
 slack = [
     'slackclient>=1.0.0,<2.0.0',
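
For reference, dropping the pin means any sentry-sdk release from 0.8.0
onward now satisfies the extra. A quick check with the packaging library
(an illustration, not part of the commit):

    from packaging.specifiers import SpecifierSet

    old_spec = SpecifierSet(">=0.8.0,<0.19.0")
    new_spec = SpecifierSet(">=0.8.0")

    print("0.19.1" in old_spec)  # False - the upper bound excluded it
    print("0.19.1" in new_spec)  # True  - any newer release is allowed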
diff --git a/tests/contrib/operators/test_dataproc_operator.py b/tests/contrib/operators/test_dataproc_operator.py
index 7e21ffd..d33094f 100644
--- a/tests/contrib/operators/test_dataproc_operator.py
+++ b/tests/contrib/operators/test_dataproc_operator.py
@@ -26,6 +26,7 @@ import unittest
 from typing import Dict
 
 import time
+from uuid import UUID
 
 from airflow.contrib.hooks.gcp_dataproc_hook import _DataProcJobBuilder
 from airflow.models.taskinstance import TaskInstance
@@ -77,6 +78,8 @@ WORKER_MACHINE_TYPE = 'n1-standard-4'
 WORKER_DISK_SIZE = 200
 WORKER_DISK_TYPE = 'pd-ssd'
 NUM_PREEMPTIBLE_WORKERS = 2
+MOCK_UUID = UUID('cf4a56d2-8101-4217-b027-2af6216feb48')
+MOCK_UUID_PREFIX = str(MOCK_UUID)[:8]
 GET_INIT_ACTION_TIMEOUT = "600s"  # 10m
 LABEL1 = {}  # type: Dict
 LABEL2 = {'application': 'test', 'year': 2017}
@@ -796,13 +799,13 @@ class DataProcHadoopOperatorTest(unittest.TestCase):
             schedule_interval='@daily')
 
     @mock.patch('airflow.contrib.operators.dataproc_operator.DataProcJobBaseOperator.execute')
-    @mock.patch('airflow.contrib.operators.dataproc_operator.uuid.uuid4', return_value='test')
+    @mock.patch('airflow.contrib.operators.dataproc_operator.uuid.uuid4', return_value=MOCK_UUID)
     def test_correct_job_definition(self, mock_hook, mock_uuid):
         # Expected job
         job_definition = deepcopy(DATAPROC_JOB_TO_SUBMIT)
         job_definition['job']['hadoopJob'] = {'mainClass': None}
         job_definition['job']['reference']['projectId'] = None
-        job_definition['job']['reference']['jobId'] = DATAPROC_JOB_ID + "_test"
+        job_definition['job']['reference']['jobId'] = DATAPROC_JOB_ID + "_" + MOCK_UUID_PREFIX
 
         # Prepare job using operator
         task = DataProcHadoopOperator(
@@ -880,13 +883,13 @@ class DataProcHiveOperatorTest(unittest.TestCase):
             schedule_interval='@daily')
 
     @mock.patch('airflow.contrib.operators.dataproc_operator.DataProcJobBaseOperator.execute')
-    @mock.patch('airflow.contrib.operators.dataproc_operator.uuid.uuid4', return_value='test')
+    @mock.patch('airflow.contrib.operators.dataproc_operator.uuid.uuid4', return_value=MOCK_UUID)
     def test_correct_job_definition(self, mock_hook, mock_uuid):
         # Expected job
         job_definition = deepcopy(DATAPROC_JOB_TO_SUBMIT)
         job_definition['job']['hiveJob'] = {'queryFileUri': None}
         job_definition['job']['reference']['projectId'] = None
-        job_definition['job']['reference']['jobId'] = DATAPROC_JOB_ID + "_test"
+        job_definition['job']['reference']['jobId'] = DATAPROC_JOB_ID + "_" + MOCK_UUID_PREFIX
 
         # Prepare job using operator
         task = DataProcHiveOperator(
@@ -963,13 +966,13 @@ class DataProcPigOperatorTest(unittest.TestCase):
             schedule_interval='@daily')
 
     @mock.patch('airflow.contrib.operators.dataproc_operator.DataProcJobBaseOperator.execute')
-    @mock.patch('airflow.contrib.operators.dataproc_operator.uuid.uuid4', return_value='test')
+    @mock.patch('airflow.contrib.operators.dataproc_operator.uuid.uuid4', return_value=MOCK_UUID)
     def test_correct_job_definition(self, mock_hook, mock_uuid):
         # Expected job
         job_definition = deepcopy(DATAPROC_JOB_TO_SUBMIT)
         job_definition['job']['pigJob'] = {'queryFileUri': None}
         job_definition['job']['reference']['projectId'] = None
-        job_definition['job']['reference']['jobId'] = DATAPROC_JOB_ID + "_test"
+        job_definition['job']['reference']['jobId'] = DATAPROC_JOB_ID + "_" + MOCK_UUID_PREFIX
 
         # Prepare job using operator
         task = DataProcPigOperator(
@@ -1052,13 +1055,13 @@ class DataProcPySparkOperatorTest(unittest.TestCase):
             schedule_interval='@daily')
 
     @mock.patch('airflow.contrib.operators.dataproc_operator.DataProcJobBaseOperator.execute')
-    @mock.patch('airflow.contrib.operators.dataproc_operator.uuid.uuid4', return_value='test')
+    @mock.patch('airflow.contrib.operators.dataproc_operator.uuid.uuid4', return_value=MOCK_UUID)
     def test_correct_job_definition(self, mock_hook, mock_uuid):
         # Expected job
         job_definition = deepcopy(DATAPROC_JOB_TO_SUBMIT)
         job_definition['job']['pysparkJob'] = {'mainPythonFileUri': 'main_class'}
         job_definition['job']['reference']['projectId'] = None
-        job_definition['job']['reference']['jobId'] = DATAPROC_JOB_ID + "_test"
+        job_definition['job']['reference']['jobId'] = DATAPROC_JOB_ID + "_" + MOCK_UUID_PREFIX
 
         # Prepare job using operator
         task = DataProcPySparkOperator(
@@ -1139,13 +1142,13 @@ class DataProcSparkOperatorTest(unittest.TestCase):
             schedule_interval='@daily')
 
     @mock.patch('airflow.contrib.operators.dataproc_operator.DataProcJobBaseOperator.execute')
-    @mock.patch('airflow.contrib.operators.dataproc_operator.uuid.uuid4', return_value='test')
+    @mock.patch('airflow.contrib.operators.dataproc_operator.uuid.uuid4', return_value=MOCK_UUID)
     def test_correct_job_definition(self, mock_hook, mock_uuid):
         # Expected job
         job_definition = deepcopy(DATAPROC_JOB_TO_SUBMIT)
         job_definition['job']['sparkJob'] = {'mainClass': 'main_class'}
         job_definition['job']['reference']['projectId'] = None
-        job_definition['job']['reference']['jobId'] = DATAPROC_JOB_ID + "_test"
+        job_definition['job']['reference']['jobId'] = DATAPROC_JOB_ID + "_" + MOCK_UUID_PREFIX
 
         # Prepare job using operator
         task = DataProcSparkOperator(
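
All five operator tests change the same way: the expected jobId is now derived
from the mocked UUID (the job id plus "_" plus the first eight characters of
str(uuid.uuid4())) instead of hard-coding "_test". A self-contained sketch of
that scheme (build_job_id is an illustrative stand-in, not the operator's
actual method):

    import uuid
    from unittest import mock
    from uuid import UUID

    MOCK_UUID = UUID('cf4a56d2-8101-4217-b027-2af6216feb48')
    MOCK_UUID_PREFIX = str(MOCK_UUID)[:8]  # 'cf4a56d2'

    def build_job_id(name):
        # mirrors the "<name>_<8-char uuid prefix>" job id scheme
        return "{}_{}".format(name, str(uuid.uuid4())[:8])

    with mock.patch('uuid.uuid4', return_value=MOCK_UUID):
        assert build_job_id('test-job') == 'test-job_' + MOCK_UUID_PREFIX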
diff --git a/tests/models/test_kubernetes.py b/tests/models/test_kubernetes.py
index f18bd5b..2fe63af 100644
--- a/tests/models/test_kubernetes.py
+++ b/tests/models/test_kubernetes.py
@@ -18,12 +18,15 @@
 # under the License.
 
 import unittest
+from uuid import UUID
 
 from mock import patch
 
 from airflow import settings
 from airflow.models import KubeResourceVersion, KubeWorkerIdentifier
 
+MOCK_UUID = UUID('cf4a56d2-8101-4217-b027-2af6216feb48')
+
 
 class TestKubeResourceVersion(unittest.TestCase):
 
@@ -47,9 +50,9 @@ class TestKubeWorkerIdentifier(unittest.TestCase):
         session.query(KubeWorkerIdentifier).update({
             KubeWorkerIdentifier.worker_uuid: ''
         })
-        mock_uuid.return_value = 'abcde'
+        mock_uuid.return_value = MOCK_UUID
         worker_uuid = KubeWorkerIdentifier.get_or_create_current_kube_worker_uuid(session)
-        self.assertEqual(worker_uuid, 'abcde')
+        self.assertEqual(worker_uuid, str(MOCK_UUID))
 
     def test_get_or_create_exist(self):
         session = settings.Session()
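
The kubernetes test follows the same pattern, with one wrinkle: the model
stores the generated UUID as a string, so the assertion compares against
str(MOCK_UUID) rather than the UUID object itself. A reduced sketch (the
dict-backed helper is a stand-in for the real SQLAlchemy-backed
KubeWorkerIdentifier):

    import uuid
    from unittest import mock
    from uuid import UUID

    MOCK_UUID = UUID('cf4a56d2-8101-4217-b027-2af6216feb48')
    _store = {'worker_uuid': ''}

    def get_or_create_worker_uuid():
        # stand-in for KubeWorkerIdentifier.get_or_create_current_kube_worker_uuid
        if not _store['worker_uuid']:
            _store['worker_uuid'] = str(uuid.uuid4())
        return _store['worker_uuid']

    with mock.patch('uuid.uuid4', return_value=MOCK_UUID):
        assert get_or_create_worker_uuid() == str(MOCK_UUID)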