You are viewing a plain text version of this content; the canonical link to the original message was included in the HTML version but is not available in this plain-text rendering.
Posted to commits@airflow.apache.org by as...@apache.org on 2021/12/10 10:07:09 UTC
[airflow] branch main updated: Remove db call from `DatabricksHook.__init__()` (#20180)
This is an automated email from the ASF dual-hosted git repository.
ash pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 66f94f9 Remove db call from `DatabricksHook.__init__()` (#20180)
66f94f9 is described below
commit 66f94f95c2e92baad2761b5a1fa405e36c17808a
Author: Josh Fell <48...@users.noreply.github.com>
AuthorDate: Fri Dec 10 05:05:37 2021 -0500
Remove db call from `DatabricksHook.__init__()` (#20180)
---
airflow/providers/databricks/hooks/databricks.py | 15 ++++++++++-----
1 file changed, 10 insertions(+), 5 deletions(-)
diff --git a/airflow/providers/databricks/hooks/databricks.py b/airflow/providers/databricks/hooks/databricks.py
index ac8d951..e56b15a 100644
--- a/airflow/providers/databricks/hooks/databricks.py
+++ b/airflow/providers/databricks/hooks/databricks.py
@@ -135,11 +135,7 @@ class DatabricksHook(BaseHook):
) -> None:
super().__init__()
self.databricks_conn_id = databricks_conn_id
- self.databricks_conn = self.get_connection(databricks_conn_id)
- if 'host' in self.databricks_conn.extra_dejson:
- self.host = self._parse_host(self.databricks_conn.extra_dejson['host'])
- else:
- self.host = self._parse_host(self.databricks_conn.host)
+ self.databricks_conn = None
self.timeout_seconds = timeout_seconds
if retry_limit < 1:
raise ValueError('Retry limit must be greater than equal to 1')
@@ -303,6 +299,15 @@ class DatabricksHook(BaseHook):
:rtype: dict
"""
method, endpoint = endpoint_info
+
+ if self.databricks_conn is None:
+ self.databricks_conn = self.get_connection(self.databricks_conn_id)
+
+ if 'host' in self.databricks_conn.extra_dejson:
+ self.host = self._parse_host(self.databricks_conn.extra_dejson['host'])
+ else:
+ self.host = self._parse_host(self.databricks_conn.host)
+
url = f'https://{self.host}/{endpoint}'
aad_headers = self._get_aad_headers()