You are viewing a plain text version of this content. The canonical link to the original (HTML) version is not preserved in this plain-text export.
Posted to commits@airflow.apache.org by sa...@apache.org on 2016/11/20 19:54:25 UTC

incubator-airflow git commit: [AIRFLOW-375] Fix pylint errors

Repository: incubator-airflow
Updated Branches:
  refs/heads/master 4e550cb9e -> 72cc8b300


[AIRFLOW-375] Fix pylint errors

Closes #1893 from zodiac/landscape_errors


Project: http://git-wip-us.apache.org/repos/asf/incubator-airflow/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-airflow/commit/72cc8b30
Tree: http://git-wip-us.apache.org/repos/asf/incubator-airflow/tree/72cc8b30
Diff: http://git-wip-us.apache.org/repos/asf/incubator-airflow/diff/72cc8b30

Branch: refs/heads/master
Commit: 72cc8b3006576153aa30d27643807b4ae5dfb593
Parents: 4e550cb
Author: Li Xuanji <xu...@gmail.com>
Authored: Sun Nov 20 11:54:13 2016 -0800
Committer: Siddharth Anand <si...@yahoo.com>
Committed: Sun Nov 20 11:54:13 2016 -0800

----------------------------------------------------------------------
 airflow/hooks/hive_hooks.py                  | 5 ++++-
 airflow/operators/redshift_to_s3_operator.py | 2 +-
 airflow/operators/sensors.py                 | 2 +-
 3 files changed, 6 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/72cc8b30/airflow/hooks/hive_hooks.py
----------------------------------------------------------------------
diff --git a/airflow/hooks/hive_hooks.py b/airflow/hooks/hive_hooks.py
index d7803b8..3e7d2db 100644
--- a/airflow/hooks/hive_hooks.py
+++ b/airflow/hooks/hive_hooks.py
@@ -278,7 +278,7 @@ class HiveCliHook(BaseHook):
             field_dict=None,
             delimiter=',',
             encoding='utf8',
-            pandas_kwargs={}, **kwargs):
+            pandas_kwargs=None, **kwargs):
         """
         Loads a pandas DataFrame into hive.
 
@@ -317,6 +317,9 @@ class HiveCliHook(BaseHook):
 
             return dict((col, DTYPE_KIND_HIVE_TYPE[dtype.kind]) for col, dtype in df.dtypes.iteritems())
 
+        if pandas_kwargs is None:
+            pandas_kwargs = {}
+
         with TemporaryDirectory(prefix='airflow_hiveop_') as tmp_dir:
             with NamedTemporaryFile(dir=tmp_dir) as f:
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/72cc8b30/airflow/operators/redshift_to_s3_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/redshift_to_s3_operator.py b/airflow/operators/redshift_to_s3_operator.py
index 155d84f..d9ef59d 100644
--- a/airflow/operators/redshift_to_s3_operator.py
+++ b/airflow/operators/redshift_to_s3_operator.py
@@ -52,7 +52,7 @@ class RedshiftToS3Transfer(BaseOperator):
             s3_key,
             redshift_conn_id='redshift_default',
             s3_conn_id='s3_default',
-            unload_options=[],
+            unload_options=tuple(),
             autocommit=False,
             parameters=None,
             *args, **kwargs):

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/72cc8b30/airflow/operators/sensors.py
----------------------------------------------------------------------
diff --git a/airflow/operators/sensors.py b/airflow/operators/sensors.py
index 58040bc..8dd1b71 100644
--- a/airflow/operators/sensors.py
+++ b/airflow/operators/sensors.py
@@ -149,7 +149,7 @@ class MetastorePartitionSensor(SqlSensor):
         self.schema = schema
         self.first_poke = True
         self.conn_id = mysql_conn_id
-        super(SqlSensor, self).__init__(*args, **kwargs)
+        super(MetastorePartitionSensor, self).__init__(*args, **kwargs)
 
     def poke(self, context):
         if self.first_poke: