Posted to commits@airflow.apache.org by ka...@apache.org on 2020/06/25 18:37:02 UTC

[airflow] 01/02: Add default `conf` parameter to Spark JDBC Hook (#8787)

This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 450dd520292d173627aac52b2c6638b63a07c81c
Author: Rafael Bottega <rb...@worldremit.com>
AuthorDate: Sun May 10 00:07:04 2020 +0100

    Add default `conf` parameter to Spark JDBC Hook (#8787)
    
    (cherry picked from commit 7506c73f1721151e9c50ef8bdb70d2136a16190b)
---
 airflow/contrib/hooks/spark_jdbc_hook.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/airflow/contrib/hooks/spark_jdbc_hook.py b/airflow/contrib/hooks/spark_jdbc_hook.py
index c188b1e..a295fa3 100644
--- a/airflow/contrib/hooks/spark_jdbc_hook.py
+++ b/airflow/contrib/hooks/spark_jdbc_hook.py
@@ -144,7 +144,7 @@ class SparkJDBCHook(SparkSubmitHook):
         super(SparkJDBCHook, self).__init__(*args, **kwargs)
         self._name = spark_app_name
         self._conn_id = spark_conn_id
-        self._conf = spark_conf
+        self._conf = spark_conf or {}
         self._py_files = spark_py_files
         self._files = spark_files
         self._jars = spark_jars
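
For context, the one-line change above applies the common `or {}` defaulting
pattern so that `self._conf` is always a dict, even when the caller passes no
`spark_conf`. Below is a minimal sketch of that pattern; the class is
illustrative only, not the real SparkJDBCHook:

    class ExampleHook:
        def __init__(self, spark_conf=None):
            # `spark_conf or {}` guarantees self._conf is a dict, so later
            # code can call dict methods (e.g. .items(), .update()) without
            # first checking for None. A `conf={}` default argument would be
            # a shared mutable default, which this pattern avoids.
            self._conf = spark_conf or {}

    hook = ExampleHook()      # no conf passed
    assert hook._conf == {}   # before this commit, _conf would have been None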