Posted to commits@airflow.apache.org by GitBox <gi...@apache.org> on 2022/02/24 14:50:13 UTC

[GitHub] [airflow] mik-laj commented on a change in pull request #20882: Add Looker PDT operators

mik-laj commented on a change in pull request #20882:
URL: https://github.com/apache/airflow/pull/20882#discussion_r813954369



##########
File path: airflow/providers/google/cloud/hooks/looker.py
##########
@@ -0,0 +1,268 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+"""This module contains a Google Cloud Looker hook."""
+
+import json
+import time
+from enum import Enum
+from typing import Dict, Optional
+
+from looker_sdk.rtl import api_settings, auth_session, requests_transport, serialize
+from looker_sdk.sdk.api40 import methods as methods40
+from packaging.version import parse as parse_version
+
+from airflow.exceptions import AirflowException
+from airflow.hooks.base import BaseHook
+from airflow.models.connection import Connection
+from airflow.version import version
+
+
+class LookerHook(BaseHook):
+    """Hook for Looker APIs."""
+
+    def __init__(
+        self,
+        looker_conn_id: str,
+    ) -> None:
+        super().__init__()
+        self.looker_conn_id = looker_conn_id
+        self.source = self.get_api_source()
+
+    def start_pdt_build(
+        self,
+        model: str,
+        view: str,
+        query_params: Optional[Dict] = None,
+    ):
+        """
+        Submits a PDT materialization job to Looker.
+
+        :param model: Required. The model of the PDT to start building.
+        :param view: Required. The view of the PDT to start building.
+        :param query_params: Optional. Additional materialization parameters.
+        """
+        self.log.info("Submitting PDT materialization job. Model: '%s', view: '%s'.", model, view)
+        self.log.debug("PDT materialization job source: '%s'.", self.source)
+
+        sdk = self.get_looker_sdk()
+        looker_ver = sdk.versions().looker_release_version
+        if parse_version(looker_ver) < parse_version("22.2.0"):
+            raise AirflowException(f'This API requires Looker version 22.2+. Found: {looker_ver}.')
+
+        # unpack query_params dict into kwargs (if not None)
+        if query_params:
+            resp = sdk.start_pdt_build(model_name=model, view_name=view, source=self.source, **query_params)
+        else:
+            resp = sdk.start_pdt_build(model_name=model, view_name=view, source=self.source)
+
+        self.log.info("Start PDT build response: '%s'.", resp)
+
+        return resp
+
+    def check_pdt_build(
+        self,
+        materialization_id: str,
+    ):
+        """
+        Gets the PDT materialization job status from Looker.
+
+        :param materialization_id: Required. The materialization id to check status for.
+        """
+        self.log.info("Requesting PDT materialization job status. Job id: %s.", materialization_id)
+
+        sdk = self.get_looker_sdk()
+        resp = sdk.check_pdt_build(materialization_id=materialization_id)
+
+        self.log.info("Check PDT build response: '%s'.", resp)
+        return resp
+
+    def pdt_build_status(
+        self,
+        materialization_id: str,
+    ) -> Dict:
+        """
+        Gets the PDT materialization job status.
+
+        :param materialization_id: Required. The materialization id to check status for.
+        """
+        resp = self.check_pdt_build(materialization_id=materialization_id)
+
+        status_json = resp['resp_text']
+        status_dict = json.loads(status_json)
+
+        self.log.info(
+            "PDT materialization job id: %s. Status: '%s'.", materialization_id, status_dict['status']
+        )
+
+        return status_dict
+
+    def stop_pdt_build(
+        self,
+        materialization_id: str,
+    ):
+        """
+        Starts a PDT materialization job cancellation request.
+
+        :param materialization_id: Required. The materialization id to stop.
+        """
+        self.log.info("Stopping PDT materialization. Job id: %s.", materialization_id)
+        self.log.debug("PDT materialization job source: '%s'.", self.source)
+
+        sdk = self.get_looker_sdk()
+        resp = sdk.stop_pdt_build(materialization_id=materialization_id, source=self.source)
+
+        self.log.info("Stop PDT build response: '%s'.", resp)
+        return resp
+
+    def wait_for_job(
+        self,
+        materialization_id: str,
+        wait_time: int = 10,
+        timeout: Optional[int] = None,
+    ) -> None:
+        """
+        Helper method that polls a PDT materialization job until it finishes.
+
+        :param materialization_id: Required. The materialization id to wait for.
+        :param wait_time: Optional. Number of seconds between checks.
+        :param timeout: Optional. How many seconds to wait for the job to be ready.
+            Used only if ``asynchronous`` is False.
+        """
+        self.log.info('Waiting for PDT materialization job to complete. Job id: %s.', materialization_id)
+
+        status = None
+        start = time.monotonic()
+
+        while status not in (
+            JobStatus.DONE.value,
+            JobStatus.ERROR.value,
+            JobStatus.CANCELLED.value,
+            JobStatus.UNKNOWN.value,
+        ):
+
+            if timeout and start + timeout < time.monotonic():
+                raise AirflowException(
+                    f"Timeout: PDT materialization job is not ready after {timeout}s. "
+                    f"Job id: {materialization_id}."
+                )
+
+            time.sleep(wait_time)
+
+            status_dict = self.pdt_build_status(materialization_id=materialization_id)
+            status = status_dict['status']
+
+        if status == JobStatus.ERROR.value:
+            msg = status_dict['message']
+            raise AirflowException(
+                f'PDT materialization job failed. Job id: {materialization_id}. Message:\n"{msg}"'
+            )
+        if status == JobStatus.CANCELLED.value:
+            raise AirflowException(f'PDT materialization job was cancelled. Job id: {materialization_id}.')
+        if status == JobStatus.UNKNOWN.value:
+            raise AirflowException(
+                f'PDT materialization job has unknown status. Job id: {materialization_id}.'
+            )
+
+        self.log.info('PDT materialization job completed successfully. Job id: %s.', materialization_id)
+
+    def get_api_source(self):
+        """Returns origin of the API call."""
+
+        # Infer API source based on Airflow version, e.g.:
+        # "2.1.4+composer" -> Cloud Composer
+        # "2.1.4" -> Airflow
+
+        self.log.debug(
+            "Got the following version for connection id: '%s'. Version: '%s'.", self.looker_conn_id, version
+        )
+
+        return 'composer' if 'composer' in version else 'airflow'

Review comment:
    We don't want any SaaS-specific code in our codebase. If you need to identify the environment, you can read an environment variable and configure this parameter based on it.
    For example, see:
    https://github.com/apache/airflow/blob/272d242316a1d163598d93511e2bfd7b717b4dce/airflow/providers/snowflake/hooks/snowflake.py#L184
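
    A minimal sketch of the suggested approach, assuming a hypothetical LOOKER_API_SOURCE environment variable (the variable name and the 'airflow' fallback are illustrative only, not part of this PR or of any Airflow API):

        import os

        # Hypothetical variable name; a deployment would set it explicitly instead of
        # the hook inferring the platform from the Airflow version string.
        _API_SOURCE_ENV_VAR = "LOOKER_API_SOURCE"

        def get_api_source() -> str:
            """Return the origin of the API call, read from an environment variable."""
            # Fall back to plain 'airflow' when the variable is not set.
            return os.environ.get(_API_SOURCE_ENV_VAR, "airflow")

        print(get_api_source())  # -> 'airflow' unless LOOKER_API_SOURCE is set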




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscribe@airflow.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org