Posted to commits@airflow.apache.org by po...@apache.org on 2022/03/11 18:25:59 UTC

[airflow] branch main updated: Remove some really old Airflow 1.10 compatibility shims (#22187)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 6e0a689  Remove some really old Airflow 1.10 compatibility shims (#22187)
6e0a689 is described below

commit 6e0a68983ef05a132dbfdbd102468678ed0f9182
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Fri Mar 11 19:24:45 2022 +0100

    Remove some really old Airflow 1.10 compatibility shims (#22187)
    
    We used some Airflow 1.10 compatibility shims in system
    tests and Breeze's entrypoints so that Airflow 1.10 could
    still be run when needed, but that need has been gone for
    months now. Removing the shims is long overdue.
---
 scripts/in_container/check_environment.sh          | 16 ++--------
 scripts/in_container/entrypoint_ci.sh              |  8 -----
 .../operators/test_spark_kubernetes_system.py      |  4 ---
 .../snowflake/operators/test_snowflake_system.py   | 35 ----------------------
 tests/test_utils/system_tests_class.py             |  9 ------
 5 files changed, 3 insertions(+), 69 deletions(-)
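
For readers skimming the patch below, every removed shim follows the same
pattern: branch on the RUN_AIRFLOW_1_10 environment variable and call either
the legacy 1.10 CLI or the Airflow 2 CLI. A minimal bash sketch of that
pattern, adapted from the deleted lines of check_environment.sh shown in the
diff further down:

    # Compatibility-shim pattern removed by this commit (adapted from
    # the deleted lines in check_environment.sh).
    if [[ ${RUN_AIRFLOW_1_10} == "true" ]]; then
        airflow resetdb -y      # Airflow 1.10 CLI
    else
        airflow db reset -y     # Airflow 2.x CLI
    fi
    # With 1.10 support dropped, only the 2.x form remains:
    airflow db reset -y

The same CLI mapping applies to database initialization (airflow initdb vs.
airflow db init) and admin user creation (airflow create_user vs.
airflow users create), as the remaining hunks show.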

diff --git a/scripts/in_container/check_environment.sh b/scripts/in_container/check_environment.sh
index ca1a365..d525fb1 100755
--- a/scripts/in_container/check_environment.sh
+++ b/scripts/in_container/check_environment.sh
@@ -116,11 +116,7 @@ function resetdb_if_requested() {
         echo
         echo "Resetting the DB"
         echo
-        if [[ ${RUN_AIRFLOW_1_10} == "true" ]]; then
-            airflow resetdb -y
-        else
-            airflow db reset -y
-        fi
+        airflow db reset -y
         echo
         echo "Database has been reset"
         echo
@@ -138,14 +134,8 @@ function startairflow_if_requested() {
 
         . "$( dirname "${BASH_SOURCE[0]}" )/configure_environment.sh"
 
-        # initialize db and create the admin user if it's a new run
-        if [[ ${RUN_AIRFLOW_1_10} == "true" ]]; then
-            airflow initdb
-            airflow create_user -u admin -p admin -f Thor -l Adminstra -r Admin -e dummy@dummy.email || true
-        else
-            airflow db init
-            airflow users create -u admin -p admin -f Thor -l Adminstra -r Admin -e dummy@dummy.email
-        fi
+        airflow db init
+        airflow users create -u admin -p admin -f Thor -l Adminstra -r Admin -e dummy@dummy.email
 
         . "$( dirname "${BASH_SOURCE[0]}" )/run_init_script.sh"
 
diff --git a/scripts/in_container/entrypoint_ci.sh b/scripts/in_container/entrypoint_ci.sh
index f4eba9a..77a5ca9 100755
--- a/scripts/in_container/entrypoint_ci.sh
+++ b/scripts/in_container/entrypoint_ci.sh
@@ -63,12 +63,6 @@ RUN_TESTS=${RUN_TESTS:="false"}
 CI=${CI:="false"}
 USE_AIRFLOW_VERSION="${USE_AIRFLOW_VERSION:=""}"
 
-if [[ ${AIRFLOW_VERSION} == *1.10* || ${USE_AIRFLOW_VERSION} == *1.10* ]]; then
-    export RUN_AIRFLOW_1_10="true"
-else
-    export RUN_AIRFLOW_1_10="false"
-fi
-
 if [[ ${USE_AIRFLOW_VERSION} == "" ]]; then
     export PYTHONPATH=${AIRFLOW_SOURCES}
     echo
@@ -144,8 +138,6 @@ if [[ ${USE_PACKAGES_FROM_DIST=} == "true" ]]; then
     fi
 fi
 
-export RUN_AIRFLOW_1_10=${RUN_AIRFLOW_1_10:="false"}
-
 # Added to have run-tests on path
 export PATH=${PATH}:${AIRFLOW_SOURCES}
 
diff --git a/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes_system.py b/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes_system.py
index b85de2d53..6b00c26 100644
--- a/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes_system.py
+++ b/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes_system.py
@@ -19,8 +19,6 @@ import subprocess
 
 import pytest
 
-from airflow.models import Connection
-from airflow.utils import db
 from tests.test_utils import AIRFLOW_MAIN_FOLDER
 from tests.test_utils.system_tests_class import SystemTest
 
@@ -61,8 +59,6 @@ class SparkKubernetesExampleDagsSystemTest(SystemTest):
     def setUp(self):
         super().setUp()
         kubectl_apply_list(SPARK_OPERATOR_MANIFESTS)
-        if os.environ.get("RUN_AIRFLOW_1_10") == "true":
-            db.merge_conn(Connection(conn_id='kubernetes_default', conn_type='kubernetes'))
 
     def tearDown(self):
         super().tearDown()
diff --git a/tests/providers/snowflake/operators/test_snowflake_system.py b/tests/providers/snowflake/operators/test_snowflake_system.py
index dd1a362..5507125 100644
--- a/tests/providers/snowflake/operators/test_snowflake_system.py
+++ b/tests/providers/snowflake/operators/test_snowflake_system.py
@@ -15,13 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-import json
 import os
 
 import pytest
 
-from airflow.models import Connection
-from airflow.utils import db
 from tests.test_utils import AIRFLOW_MAIN_FOLDER
 from tests.test_utils.system_tests_class import SystemTest
 
@@ -37,37 +34,5 @@ class SnowflakeExampleDagsSystemTest(SystemTest):
     def setUp(self):
         super().setUp()
 
-        if os.environ.get('RUN_AIRFLOW_1_10') == 'true':
-            with open(SNOWFLAKE_CREDENTIALS_PATH) as f:
-                # Example:
-                # {
-                #     "account": "foo",
-                #     "region": "us-west-2",
-                #     "user": "airflow",
-                #     "password": "secret",
-                #     "warehouse": "shared",
-                #     "database": "test",
-                #     "schema": "public",
-                #     "role": "airflow"
-                # }
-                credentials = json.load(f)
-
-            extra = {
-                'account': credentials['account'],
-                'region': credentials['region'],
-                'role': credentials['role'],
-                'warehouse': credentials['warehouse'],
-                'database': credentials['database'],
-            }
-            conn = Connection(
-                conn_id='snowflake_conn_id',
-                login=credentials['user'],
-                password=credentials['password'],
-                schema=credentials['schema'],
-                conn_type='snowflake',
-                extra=json.dumps(extra),
-            )
-            db.merge_conn(conn)
-
     def test_dag_example(self):
         self.run_dag('example_snowflake', SNOWFLAKE_DAG_FOLDER)
diff --git a/tests/test_utils/system_tests_class.py b/tests/test_utils/system_tests_class.py
index a53e042..be9cf73 100644
--- a/tests/test_utils/system_tests_class.py
+++ b/tests/test_utils/system_tests_class.py
@@ -17,7 +17,6 @@
 # under the License.
 import os
 import shutil
-import sys
 from datetime import datetime
 from pathlib import Path
 from unittest import TestCase
@@ -124,14 +123,6 @@ class SystemTest(TestCase, LoggingMixin):
         :param dag_id: id of a DAG to be run
         :param dag_folder: directory where to look for the specific DAG. Relative to AIRFLOW_HOME.
         """
-        if os.environ.get("RUN_AIRFLOW_1_10") == "true":
-            # For system tests purpose we are changing airflow/providers
-            # to side packages path of the installed providers package
-            python = f"python{sys.version_info.major}.{sys.version_info.minor}"
-            dag_folder = dag_folder.replace(
-                "/opt/airflow/airflow/providers",
-                f"/usr/local/lib/{python}/site-packages/airflow/providers",
-            )
         self.log.info("Looking for DAG: %s in %s", dag_id, dag_folder)
         dag_bag = DagBag(dag_folder=dag_folder, include_examples=False)
         dag = dag_bag.get_dag(dag_id)