Posted to commits@airflow.apache.org by ka...@apache.org on 2020/12/26 00:32:07 UTC

[airflow] branch master updated: add system test for azure local to adls operator (#13190)

This is an automated email from the ASF dual-hosted git repository.

kamilbregula pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/master by this push:
     new a1e9195  add system test for azure local to adls operator (#13190)
a1e9195 is described below

commit a1e91950766d12022a89bd667cc1ef1a4dec387c
Author: Ephraim Anierobi <sp...@gmail.com>
AuthorDate: Sat Dec 26 01:31:55 2020 +0100

    add system test for azure local to adls operator (#13190)
---
 .../azure/example_dags/example_local_to_adls.py    | 10 ++++-
 .../azure/transfers/test_local_to_adls_system.py   | 48 ++++++++++++++++++++++
 2 files changed, 56 insertions(+), 2 deletions(-)
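
In short, the commit does two things: the example DAG gains an AzureDataLakeStorageDeleteOperator task that removes the uploaded file again (so repeated system-test runs start from a clean remote path), and a new test module runs that DAG end to end against a real Azure Data Lake connection via AzureSystemTest.run_dag().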

diff --git a/airflow/providers/microsoft/azure/example_dags/example_local_to_adls.py b/airflow/providers/microsoft/azure/example_dags/example_local_to_adls.py
index 294b5f7..1735ba0 100644
--- a/airflow/providers/microsoft/azure/example_dags/example_local_to_adls.py
+++ b/airflow/providers/microsoft/azure/example_dags/example_local_to_adls.py
@@ -18,12 +18,12 @@
 import os
 
 from airflow import models
+from airflow.providers.microsoft.azure.operators.adls_delete import AzureDataLakeStorageDeleteOperator
 from airflow.providers.microsoft.azure.transfers.local_to_adls import LocalToAzureDataLakeStorageOperator
 from airflow.utils.dates import days_ago
 
 LOCAL_FILE_PATH = os.environ.get("LOCAL_FILE_PATH", 'localfile.txt')
-REMOTE_FILE_PATH = os.environ.get("REMOTE_LOCAL_PATH", 'remote')
-
+REMOTE_FILE_PATH = os.environ.get("REMOTE_LOCAL_PATH", 'remote.txt')
 
 with models.DAG(
     "example_local_to_adls",
@@ -38,3 +38,9 @@ with models.DAG(
         remote_path=REMOTE_FILE_PATH,
     )
     # [END howto_operator_local_to_adls]
+
+    delete_file = AzureDataLakeStorageDeleteOperator(
+        task_id="remove_task", path=REMOTE_FILE_PATH, recursive=True
+    )
+
+    upload_file >> delete_file
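
For orientation, the two tasks above reduce to two calls on the provider's Data Lake hook. A minimal sketch, assuming the hook API of the provider at the time (AzureDataLakeHook.upload_file() and AzureDataLakeHook.remove(), and the provider's default azure_data_lake_default connection id); this is illustration only, not part of the commit:

    from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook

    # Assumed delegation (not shown in this commit): upload_file() is what
    # LocalToAzureDataLakeStorageOperator runs, and remove() is what
    # AzureDataLakeStorageDeleteOperator runs.
    hook = AzureDataLakeHook(azure_data_lake_conn_id="azure_data_lake_default")
    hook.upload_file(local_path="localfile.txt", remote_path="remote.txt")  # upload_file task
    hook.remove(path="remote.txt", recursive=True)                          # remove_task cleanup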
diff --git a/tests/providers/microsoft/azure/transfers/test_local_to_adls_system.py b/tests/providers/microsoft/azure/transfers/test_local_to_adls_system.py
new file mode 100644
index 0000000..cd0363f
--- /dev/null
+++ b/tests/providers/microsoft/azure/transfers/test_local_to_adls_system.py
@@ -0,0 +1,48 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+
+import pytest
+
+from airflow.providers.microsoft.azure.example_dags.example_local_to_adls import LOCAL_FILE_PATH
+from tests.test_utils.azure_system_helpers import (
+    AZURE_DAG_FOLDER,
+    AzureSystemTest,
+    provide_azure_data_lake_default_connection,
+)
+
+CREDENTIALS_DIR = os.environ.get('CREDENTIALS_DIR', '/files/airflow-breeze-config/keys')
+DATA_LAKE_DEFAULT_KEY = 'azure_data_lake.json'
+CREDENTIALS_PATH = os.path.join(CREDENTIALS_DIR, DATA_LAKE_DEFAULT_KEY)
+
+
+@pytest.mark.backend('postgres', 'mysql')
+@pytest.mark.credential_file(DATA_LAKE_DEFAULT_KEY)
+class LocalToAdlsSystem(AzureSystemTest):
+    def setUp(self):
+        super().setUp()
+        with open(LOCAL_FILE_PATH, 'w+') as file:
+            file.writelines(['example test files'])
+
+    def tearDown(self):
+        os.remove(LOCAL_FILE_PATH)
+        super().tearDown()
+
+    @provide_azure_data_lake_default_connection(CREDENTIALS_PATH)
+    def test_run_example_local_to_adls(self):
+        self.run_dag('example_local_to_adls', AZURE_DAG_FOLDER)