You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airflow.apache.org by GitBox <gi...@apache.org> on 2022/04/25 03:10:41 UTC

[GitHub] [airflow] dbarrundiag commented on a diff in pull request #22692: Add support for Delta Sharing protocol

dbarrundiag commented on code in PR #22692:
URL: https://github.com/apache/airflow/pull/22692#discussion_r857231218


##########
airflow/providers/delta/sharing/operators/delta_sharing.py:
##########
@@ -0,0 +1,227 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+"""This module contains Delta Sharing operators."""
+import json
+import os.path
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence
+from urllib.parse import urlparse
+
+import requests
+
+from airflow import AirflowException
+from airflow.models import BaseOperator
+from airflow.providers.delta.sharing.hooks.delta_sharing import DeltaSharingHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
class DeltaSharingLocalDownloadOperator(BaseOperator):
    """
    Operator to download data from a Delta Sharing table to a local disk.

    :param share: name of the share containing the table to download.
        This field will be templated.
    :param schema: name of the schema (database) containing the table to download.
        This field will be templated.
    :param table: name of the table to download.
        This field will be templated.
    :param location: name of directory where downloaded data will be stored.
        This field will be templated.
    :param limit: optional limit on the number of records to return.
    :param predicates: optional list of strings that will be ANDed to build a filter expression.
        This field will be templated.
    :param save_partitioned: if True, data will be saved by partitions. if False, all data files will be
       saved into a top-level directory.
    :param save_metadata:  if True, metadata will be saved into a ``_metadata/<version>.json`` file
    :param save_stats: if True, per-file statistics will be saved into a ``_stats/<data_file_name>.json``
    :param overwrite_existing: Overwrite existing files.  False by default. If file with the same name exists
        and has the same size as returned in file metadata, then it won't be re-downloaded.
    :param num_parallel_downloads: number of parallel downloads. Default is 5.
    :param delta_sharing_conn_id: Reference to the
        :ref:`Delta Sharing connection <howto/connection:delta_sharing>`.
        By default and in the common case this will be ``delta_sharing_default``. To use
        token based authentication, provide the bearer token in the password field for the
        connection and put the base URL in the ``host`` field.
    :param profile_file: Optional path or HTTP(S) URL to a Delta Sharing profile file.
        If this parameter is specified, the ``delta_sharing_conn_id`` isn't used.
    :param timeout_seconds: The timeout for this run. By default a value of 0 is used
        which means to have no timeout.
    :param retry_limit: Amount of times retry if the Delta Sharing backend is
        unreachable. Its value must be greater than or equal to 1.
    :param retry_delay: Number of seconds for initial wait between retries (it
            might be a floating point number).
    :param retry_args: An optional dictionary with arguments passed to ``tenacity.Retrying`` class.
    """

    template_fields: Sequence[str] = (
        'share',
        'schema',
        'table',
        'predicates',
        'location',
    )

    # Used in airflow.models.BaseOperator
    # Delta Sharing brand color (blue) under white text
    ui_color = '#1CB1C2'
    ui_fgcolor = '#fff'
+
+    def __init__(
+        self,
+        *,
+        share: str,
+        schema: str,
+        table: str,
+        location: str,
+        limit: Optional[int] = None,
+        predicates: Optional[List[str]] = None,
+        save_partitioned: bool = True,
+        save_metadata: bool = True,
+        save_stats: bool = True,
+        overwrite_existing: bool = False,
+        num_parallel_downloads: int = 5,
+        delta_sharing_conn_id: str = 'delta_sharing_default',
+        profile_file: Optional[str] = None,
+        timeout_seconds: int = 180,
+        retry_limit: int = 3,
+        retry_delay: float = 2.0,
+        retry_args: Optional[Dict[Any, Any]] = None,
+        **kwargs,
+    ) -> None:
+        """Creates a new ``DeltaSharingDownloadToLocalOperator``."""
+        super().__init__(**kwargs)
+        self.share = share
+        self.schema = schema
+        self.table = table
+        self.location = location
+        if limit is not None and limit < 0:
+            raise ValueError(f"limit should be greater or equal to 0, got {limit}")
+        self.limit = limit
+        self.predicates = predicates
+        self.save_partitioned = save_partitioned
+        self.save_stats = save_stats
+        self.save_metadata = save_metadata
+        self.overwrite_existing = overwrite_existing
+        if num_parallel_downloads < 1:
+            raise ValueError(
+                "num_parallel_downloads should be greater or equal to 1," f" got {num_parallel_downloads}"
+            )
+        self.num_parallel_downloads = num_parallel_downloads
+
+        self.hook = DeltaSharingHook(
+            delta_sharing_conn_id=delta_sharing_conn_id,
+            retry_args=retry_args,
+            retry_delay=retry_delay,
+            retry_limit=retry_limit,
+            timeout_seconds=timeout_seconds,
+            profile_file=profile_file,
+        )
+
+    def _get_output_file_path(self, metadata: Dict[str, Any], file: Dict[str, Any]) -> str:
+        chunks = urlparse(file['url'])
+        file_name = chunks.path.split('/')[-1]
+        file_parts = [self.location]
+        # add partition parts if table is partitioned
+        partition_values = file.get('partitionValues', {})
+        if self.save_partitioned and len(partition_values) > 0:
+            partitions = metadata.get('partitionColumns', [])
+            for part in partitions:
+                part_value = partition_values.get(part)
+                if part_value is None:
+                    self.log.warning(f"There is no value for partition '{part}'")
+                    part_value = "__null__"
+                file_parts.append(f"{part}={part_value}")
+
+        os.makedirs(os.path.join(*file_parts), exist_ok=True)
+        file_parts.append(file_name)
+        return os.path.join(*file_parts)
+
+    def _download_one(self, metadata: Dict[str, Any], file: Dict[str, Any]):
+        dest_file_path = self._get_output_file_path(metadata, file)
+        file_size = file['size']
+        if os.path.exists(dest_file_path):
+            stat = os.stat(dest_file_path)
+            if file_size == stat.st_size and not self.overwrite_existing:

Review Comment:
   Might be safer if we do some sort of MD5 checksum check here and not just the size of the file?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscribe@airflow.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org