Posted to commits@airflow.apache.org by cr...@apache.org on 2016/07/14 22:11:30 UTC

incubator-airflow git commit: [AIRFLOW-335] Fix simple style errors/warnings

Repository: incubator-airflow
Updated Branches:
  refs/heads/master aea1fa2d6 -> dc1bdf622


[AIRFLOW-335] Fix simple style errors/warnings

Closes #1665 from skudriashev/airflow-335


Project: http://git-wip-us.apache.org/repos/asf/incubator-airflow/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-airflow/commit/dc1bdf62
Tree: http://git-wip-us.apache.org/repos/asf/incubator-airflow/tree/dc1bdf62
Diff: http://git-wip-us.apache.org/repos/asf/incubator-airflow/diff/dc1bdf62

Branch: refs/heads/master
Commit: dc1bdf6222094e908b75056f9d6b0d14419e8777
Parents: aea1fa2
Author: Stanislav Kudriashev <st...@gmail.com>
Authored: Thu Jul 14 15:11:23 2016 -0700
Committer: Chris Riccomini <ch...@wepay.com>
Committed: Thu Jul 14 15:11:23 2016 -0700

----------------------------------------------------------------------
 airflow/contrib/auth/backends/github_enterprise_auth.py   |  1 +
 airflow/contrib/auth/backends/ldap_auth.py                |  4 ++--
 airflow/contrib/hooks/bigquery_hook.py                    |  8 ++++----
 airflow/contrib/hooks/qubole_hook.py                      |  2 +-
 airflow/contrib/hooks/ssh_hook.py                         |  5 ++---
 airflow/contrib/operators/mysql_to_gcs.py                 |  4 ++--
 airflow/contrib/operators/sqoop_operator.py               |  2 +-
 .../example_passing_params_via_test_command.py            |  6 +++---
 airflow/example_dags/subdags/subdag.py                    |  6 +++---
 airflow/executors/local_executor.py                       |  4 +++-
 airflow/hooks/druid_hook.py                               |  8 ++++----
 airflow/hooks/oracle_hook.py                              | 10 +++++-----
 airflow/jobs.py                                           |  2 +-
 airflow/minihivecluster.py                                |  4 +---
 airflow/models.py                                         |  2 +-
 airflow/security/kerberos.py                              |  4 ++--
 airflow/utils/db.py                                       |  4 ++--
 airflow/www/__init__.py                                   |  2 +-
 airflow/www/views.py                                      |  4 ++--
 19 files changed, 41 insertions(+), 41 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/contrib/auth/backends/github_enterprise_auth.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/auth/backends/github_enterprise_auth.py b/airflow/contrib/auth/backends/github_enterprise_auth.py
index 61ae28d..442f582 100644
--- a/airflow/contrib/auth/backends/github_enterprise_auth.py
+++ b/airflow/contrib/auth/backends/github_enterprise_auth.py
@@ -221,5 +221,6 @@ class GHEAuthBackend(object):
 
 login_manager = GHEAuthBackend()
 
+
 def login(self, request):
     return login_manager.login(request)
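
A note on the hunk above: pycodestyle expects two blank lines before a top-level definition (E302), which is exactly what the added line provides. A minimal sketch of the rule:

    backend = object()  # any preceding top-level statement


    def login(request):  # the two blank lines above the def satisfy E302
        return backend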

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/contrib/auth/backends/ldap_auth.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/auth/backends/ldap_auth.py b/airflow/contrib/auth/backends/ldap_auth.py
index 083d3ab..f06c7df 100644
--- a/airflow/contrib/auth/backends/ldap_auth.py
+++ b/airflow/contrib/auth/backends/ldap_auth.py
@@ -177,8 +177,8 @@ class LdapUser(models.User):
 
         conn.unbind()
 
-        if not 'dn' in entry:
-            # The search fitler for the user did not return any values, so an
+        if 'dn' not in entry:
+            # The search filter for the user did not return any values, so an
             # invalid user was used for credentials.
             raise AuthenticationError("Invalid username or password")
 
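Two fixes land in one hunk here: the "fitler" typo in the comment, and pycodestyle E713, which prefers `x not in y` over `not x in y`. The two spellings are equivalent; the former reads as a single operator. A standalone sketch:

    entry = {'cn': 'jdoe'}
    assert ('dn' not in entry) == (not 'dn' in entry)  # identical results; E713 prefers the left form
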

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/contrib/hooks/bigquery_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/bigquery_hook.py b/airflow/contrib/hooks/bigquery_hook.py
index 0a33290..2d1b243 100644
--- a/airflow/contrib/hooks/bigquery_hook.py
+++ b/airflow/contrib/hooks/bigquery_hook.py
@@ -301,10 +301,10 @@ class BigQueryBaseCursor(object):
                 self._split_project_dataset_table_input(
                     'source_project_dataset_table', source_project_dataset_table)
             source_project_dataset_tables_fixup.append({
-                    'projectId': source_project,
-                    'datasetId': source_dataset,
-                    'tableId': source_table
-                })
+                'projectId': source_project,
+                'datasetId': source_dataset,
+                'tableId': source_table
+            })
 
         assert 3 == len(destination_project_dataset_table.split('.')), (
             'Expected destination_project_dataset_table in the format of '
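
The bigquery hunk is pure re-indentation: with a hanging indent (the opening bracket ends its line), pycodestyle's continuation-line checks (the E12x family) want the contents indented one level from the start of the statement and the closing bracket dedented back to match it. A sketch of the shape:

    tables = []
    tables.append({
        'projectId': 'p',  # one level in from `tables`, not from the bracket
        'datasetId': 'd',
        'tableId': 't',
    })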

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/contrib/hooks/qubole_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/qubole_hook.py b/airflow/contrib/hooks/qubole_hook.py
index 241d8dd..57d00b5 100755
--- a/airflow/contrib/hooks/qubole_hook.py
+++ b/airflow/contrib/hooks/qubole_hook.py
@@ -78,7 +78,7 @@ class QuboleHook(BaseHook):
             self.cmd = self.cls.find(self.cmd.id)
             logging.info("Command Id: {0} and Status: {1}".format(str(self.cmd.id), self.cmd.status))
 
-        if self.kwargs.has_key('fetch_logs') and self.kwargs['fetch_logs'] == True:
+        if 'fetch_logs' in self.kwargs and self.kwargs['fetch_logs'] is True:
             logging.info("Logs for Command Id: {0} \n{1}".format(str(self.cmd.id), self.cmd.get_log()))
 
         if self.cmd.status != 'done':
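
dict.has_key() was removed in Python 3, so the `in` operator is the portable membership test, and flake8 flags comparisons like `== True` as E712. Strictly, `is True` is narrower than `== True` (it rejects 1, for example), which should be harmless for a boolean flag. A sketch with a plain dict standing in for self.kwargs:

    kwargs = {'fetch_logs': True}
    if 'fetch_logs' in kwargs and kwargs['fetch_logs'] is True:
        print('would fetch logs here')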

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/contrib/hooks/ssh_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/ssh_hook.py b/airflow/contrib/hooks/ssh_hook.py
index 6745353..f35e909 100755
--- a/airflow/contrib/hooks/ssh_hook.py
+++ b/airflow/contrib/hooks/ssh_hook.py
@@ -73,7 +73,7 @@ class SSHHook(BaseHook):
         if self.sshpass:
             connection_cmd = ["sshpass", "-e"] + connection_cmd
         else:
-            connection_cmd += ["-o", "BatchMode=yes"] # no password prompts
+            connection_cmd += ["-o", "BatchMode=yes"]  # no password prompts
 
         if self.conn.port:
             connection_cmd += ["-p", str(self.conn.port)]
@@ -142,8 +142,7 @@ class SSHHook(BaseHook):
         """
         tunnel_host = "{0}:{1}:{2}".format(local_port, remote_host, remote_port)
         proc = self.Popen(["-L", tunnel_host, "echo -n ready && cat"],
-                           stdin=subprocess.PIPE, stdout=subprocess.PIPE,
-                           )
+                          stdin=subprocess.PIPE, stdout=subprocess.PIPE)
 
         ready = proc.stdout.read(5)
         assert ready == b"ready", "Did not get 'ready' from remote"
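
Two rules show up in this file: E261 wants at least two spaces before an inline comment, and the Popen call simply pulls the dangling close-paren onto the last argument line. The comment rule in isolation:

    connection_cmd = ["ssh"]
    connection_cmd += ["-o", "BatchMode=yes"]  # two spaces before '#' satisfy E261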

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/contrib/operators/mysql_to_gcs.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/operators/mysql_to_gcs.py b/airflow/contrib/operators/mysql_to_gcs.py
index d35caf5..1a2312f 100644
--- a/airflow/contrib/operators/mysql_to_gcs.py
+++ b/airflow/contrib/operators/mysql_to_gcs.py
@@ -77,7 +77,7 @@ class MySqlToGoogleCloudStorageOperator(BaseOperator):
             delegation enabled.
         """
         super(MySqlToGoogleCloudStorageOperator, self).__init__(*args, **kwargs)
-        self.sql = sql;
+        self.sql = sql
         self.bucket = bucket
         self.filename = filename
         self.schema_filename = schema_filename
@@ -125,7 +125,7 @@ class MySqlToGoogleCloudStorageOperator(BaseOperator):
         schema = map(lambda schema_tuple: schema_tuple[0], cursor.description)
         file_no = 0
         tmp_file_handle = NamedTemporaryFile(delete=True)
-        tmp_file_handles = { self.filename.format(file_no): tmp_file_handle }
+        tmp_file_handles = {self.filename.format(file_no): tmp_file_handle}
 
         for row in cursor:
             # Convert datetime objects to utc seconds, and decimals to floats
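
The trailing semicolon after `self.sql = sql` is legal Python but flagged as E703 (statement ends with a semicolon), and E201/E202 forbid padding just inside brackets. A sketch of the dict-literal spacing:

    filename = 'export_{}.json'
    tmp_file_handles = {filename.format(0): None}  # no spaces just inside the braces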

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/contrib/operators/sqoop_operator.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/operators/sqoop_operator.py b/airflow/contrib/operators/sqoop_operator.py
index 8bf1c05..d483e37 100644
--- a/airflow/contrib/operators/sqoop_operator.py
+++ b/airflow/contrib/operators/sqoop_operator.py
@@ -43,7 +43,7 @@ class SqoopOperator(BaseOperator):
                  **kwargs):
         """
         :param conn_id: str
-	:param type_cmd: str specify command to execute "export" or "import"
+        :param type_cmd: str specify command to execute "export" or "import"
         :param table: Table to read
         :param target_dir: HDFS destination dir
         :param append: Append data to an existing dataset in HDFS
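
The replaced line sits inside a docstring, so the tab was harmless to the parser, but pycodestyle still reports tab indentation as W191; the fix re-indents with spaces to match the surrounding lines.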

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/example_dags/example_passing_params_via_test_command.py
----------------------------------------------------------------------
diff --git a/airflow/example_dags/example_passing_params_via_test_command.py b/airflow/example_dags/example_passing_params_via_test_command.py
index 845f474..e337f3b 100644
--- a/airflow/example_dags/example_passing_params_via_test_command.py
+++ b/airflow/example_dags/example_passing_params_via_test_command.py
@@ -20,7 +20,7 @@ from airflow.operators.bash_operator import BashOperator
 from airflow.operators.python_operator import PythonOperator
 
 dag = DAG("example_passing_params_via_test_command",
-          default_args={"owner" : "airflow",
+          default_args={"owner": "airflow",
                         "start_date":datetime.now()},
           schedule_interval='*/1 * * * *',
           dagrun_timeout=timedelta(minutes=4)
@@ -33,9 +33,9 @@ def my_py_command(ds, **kwargs):
     # -tp '{"foo":"bar"}'`
     if kwargs["test_mode"]:
         print(" 'foo' was passed in via test={} command : kwargs[params][foo] \
-               = {}".format( kwargs["test_mode"], kwargs["params"]["foo"]) )
+               = {}".format(kwargs["test_mode"], kwargs["params"]["foo"]))
     # Print out the value of "miff", passed in below via the Python Operator
-    print(" 'miff' was passed in via task params = {}".format( kwargs["params"]["miff"]) )
+    print(" 'miff' was passed in via task params = {}".format(kwargs["params"]["miff"]))
     return 1
 
 my_templated_command = """

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/example_dags/subdags/subdag.py
----------------------------------------------------------------------
diff --git a/airflow/example_dags/subdags/subdag.py b/airflow/example_dags/subdags/subdag.py
index 82e1dd1..eee28d8 100644
--- a/airflow/example_dags/subdags/subdag.py
+++ b/airflow/example_dags/subdags/subdag.py
@@ -18,9 +18,9 @@ from airflow.operators.dummy_operator import DummyOperator
 
 def subdag(parent_dag_name, child_dag_name, args):
     dag_subdag = DAG(
-      dag_id='%s.%s' % (parent_dag_name, child_dag_name),
-      default_args=args,
-      schedule_interval="@daily",
+        dag_id='%s.%s' % (parent_dag_name, child_dag_name),
+        default_args=args,
+        schedule_interval="@daily",
     )
 
     for i in range(5):

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/executors/local_executor.py
----------------------------------------------------------------------
diff --git a/airflow/executors/local_executor.py b/airflow/executors/local_executor.py
index 5447bfd..a58c3d7 100644
--- a/airflow/executors/local_executor.py
+++ b/airflow/executors/local_executor.py
@@ -84,7 +84,9 @@ class LocalExecutor(BaseExecutor):
 
     def end(self):
         # Sending poison pill to all worker
-        [self.queue.put((None, None)) for w in self.workers]
+        for _ in self.workers:
+            self.queue.put((None, None))
+
         # Wait for commands to finish
         self.queue.join()
         self.sync()
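
A list comprehension evaluated only for its side effects builds and throws away a list of None; the explicit loop states the intent and allocates nothing. A minimal Python 3 sketch:

    from queue import Queue

    q = Queue()
    workers = ['w1', 'w2']
    for _ in workers:        # clearer than [q.put((None, None)) for _ in workers]
        q.put((None, None))  # one poison pill per worker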

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/hooks/druid_hook.py
----------------------------------------------------------------------
diff --git a/airflow/hooks/druid_hook.py b/airflow/hooks/druid_hook.py
index 3529375..5fc5eec 100644
--- a/airflow/hooks/druid_hook.py
+++ b/airflow/hooks/druid_hook.py
@@ -122,10 +122,10 @@ class DruidHook(BaseHook):
                         "mapreduce.map.output.compress": "false",
                         "mapreduce.output.fileoutputformat.compress": "false",
                     },
-                    "partitionsSpec" : {
-                        "type" : "hashed",
-                        "targetPartitionSize" : target_partition_size,
-                        "numShards" : num_shards,
+                    "partitionsSpec": {
+                        "type": "hashed",
+                        "targetPartitionSize": target_partition_size,
+                        "numShards": num_shards,
                     },
                 },
                 "ioConfig": {

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/hooks/oracle_hook.py
----------------------------------------------------------------------
diff --git a/airflow/hooks/oracle_hook.py b/airflow/hooks/oracle_hook.py
index 3ff50bf..6556ce7 100644
--- a/airflow/hooks/oracle_hook.py
+++ b/airflow/hooks/oracle_hook.py
@@ -83,7 +83,7 @@ class OracleHook(DbApiHook):
                     l.append("'" + str(cell).replace("'", "''") + "'")
                 elif cell is None:
                     l.append('NULL')
-                elif type(cell) == float and numpy.isnan(cell): #coerce numpy NaN to NULL
+                elif type(cell) == float and numpy.isnan(cell):  # coerce numpy NaN to NULL
                     l.append('NULL')
                 elif isinstance(cell, numpy.datetime64):
                     l.append("'" + str(cell) + "'")
@@ -110,10 +110,10 @@ class OracleHook(DbApiHook):
         cursor = conn.cursor()
         values = ', '.join(':%s' % i for i in range(1, len(target_fields) + 1))
         prepared_stm = 'insert into {tablename} ({columns}) values ({values})'.format(
-                tablename=table,
-                columns=', '.join(target_fields),
-                values=values
-            )
+            tablename=table,
+            columns=', '.join(target_fields),
+            values=values,
+        )
         row_count = 0
         # Chunk the rows
         row_chunk = []

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/jobs.py
----------------------------------------------------------------------
diff --git a/airflow/jobs.py b/airflow/jobs.py
index a0749c0..77f34ee 100644
--- a/airflow/jobs.py
+++ b/airflow/jobs.py
@@ -296,7 +296,7 @@ class SchedulerJob(BaseJob):
         slas = (
             session
             .query(SlaMiss)
-            .filter(SlaMiss.email_sent == False or SlaMiss.notification_sent == False)
+            .filter(SlaMiss.email_sent.is_(False) or SlaMiss.notification_sent.is_(False))
             .filter(SlaMiss.dag_id == dag.dag_id)
             .all()
         )
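
One caveat worth flagging on this hunk: Python's `or` cannot be overloaded, so it does not combine the two SQLAlchemy expressions into a SQL OR; a filter that actually checks either flag needs sqlalchemy.or_ (or the `|` operator). A sketch of that form, reusing the model and session names from the hunk:

    from sqlalchemy import or_

    slas = (
        session
        .query(SlaMiss)
        .filter(or_(SlaMiss.email_sent.is_(False),
                    SlaMiss.notification_sent.is_(False)))
        .filter(SlaMiss.dag_id == dag.dag_id)
        .all()
    )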

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/minihivecluster.py
----------------------------------------------------------------------
diff --git a/airflow/minihivecluster.py b/airflow/minihivecluster.py
index f76169c..b975f27 100644
--- a/airflow/minihivecluster.py
+++ b/airflow/minihivecluster.py
@@ -40,9 +40,7 @@ class MiniHiveCluster(object):
             rlist, wlist, xlist = select.select([self.hive.stderr, self.hive.stdout], [], [])
             for f in rlist:
                 line = f.readline()
-                print (line,)
+                print(line,)
                 m = re.match(".*Starting ThriftBinaryCLIService", line)
                 if m:
                     return True
-
-
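
Cosmetic only: the space between print and its argument list goes away (E211, whitespace before parenthesis, once print is a function), and the two deleted blank lines at the end of the file clear W391.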

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/models.py
----------------------------------------------------------------------
diff --git a/airflow/models.py b/airflow/models.py
index 2736076..6e6d6fc 100644
--- a/airflow/models.py
+++ b/airflow/models.py
@@ -3420,7 +3420,7 @@ class DagRun(Base):
             external_trigger=self.external_trigger)
 
     @classmethod
-    def id_for_date(klass, date, prefix=ID_FORMAT_PREFIX):
+    def id_for_date(cls, date, prefix=ID_FORMAT_PREFIX):
         return prefix.format(date.isoformat()[:19])
 
     @provide_session
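
PEP 8 reserves the name cls for the first argument of class methods; klass works but defeats the convention (it is what checkers such as pep8-naming report as N804). The same fix appears in airflow/www/views.py further down, where the argument had been written self. A sketch (the ID_FORMAT attribute here is illustrative):

    import datetime

    class DagRun(object):
        ID_FORMAT = 'scheduled__{}'

        @classmethod
        def id_for_date(cls, date):  # cls, not klass or self
            return cls.ID_FORMAT.format(date.isoformat()[:19])

    print(DagRun.id_for_date(datetime.datetime(2016, 7, 14)))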

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/security/kerberos.py
----------------------------------------------------------------------
diff --git a/airflow/security/kerberos.py b/airflow/security/kerberos.py
index ce336c4..9d60059 100644
--- a/airflow/security/kerberos.py
+++ b/airflow/security/kerberos.py
@@ -35,7 +35,7 @@ def renew_from_kt():
     principal = configuration.get('kerberos', 'principal').replace("_HOST", socket.getfqdn())
     cmdv = [configuration.get('kerberos', 'kinit_path'),
             "-r", renewal_lifetime,
-            "-k", # host ticket
+            "-k",  # host ticket
             "-t", configuration.get('kerberos', 'keytab'),   # specify keytab
             "-c", configuration.get('kerberos', 'ccache'),   # specify credentials cache
             principal]
@@ -109,4 +109,4 @@ def run():
 
     while True:
         renew_from_kt()
-        time.sleep(configuration.getint('kerberos', 'reinit_frequency'))
\ No newline at end of file
+        time.sleep(configuration.getint('kerberos', 'reinit_frequency'))
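
Both this file and airflow/www/__init__.py below were missing their trailing newline; "\ No newline at end of file" is git's marker for the old state, and pycodestyle reports the condition as W292. The only change to the final line is the newline itself.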

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/utils/db.py
----------------------------------------------------------------------
diff --git a/airflow/utils/db.py b/airflow/utils/db.py
index 849cb39..fda467d 100644
--- a/airflow/utils/db.py
+++ b/airflow/utils/db.py
@@ -95,8 +95,8 @@ def checkout(dbapi_connection, connection_record, connection_proxy):
         connection_record.connection = connection_proxy.connection = None
         raise exc.DisconnectionError(
             "Connection record belongs to pid {}, "
-            "attempting to check out in pid {}".format(
-            connection_record.info['pid'], pid))
+            "attempting to check out in pid {}".format(connection_record.info['pid'], pid)
+        )
 
 
 def initdb():

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/www/__init__.py
----------------------------------------------------------------------
diff --git a/airflow/www/__init__.py b/airflow/www/__init__.py
index a84b6da..759b563 100644
--- a/airflow/www/__init__.py
+++ b/airflow/www/__init__.py
@@ -11,4 +11,4 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
\ No newline at end of file
+#

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/dc1bdf62/airflow/www/views.py
----------------------------------------------------------------------
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 17f99d3..7cc4546 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -2172,7 +2172,7 @@ class ConnectionModelView(wwwutils.SuperUserMixin, AirflowModelView):
     # extra__{conn_type}__name. You can also hide form elements and rename
     # others from the connection_form.js file
     form_extra_fields = {
-        'extra__jdbc__drv_path' : StringField('Driver Path'),
+        'extra__jdbc__drv_path': StringField('Driver Path'),
         'extra__jdbc__drv_clsname': StringField('Driver Class'),
         'extra__google_cloud_platform__project': StringField('Project Id'),
         'extra__google_cloud_platform__key_path': StringField('Keyfile Path'),
@@ -2223,7 +2223,7 @@ class ConnectionModelView(wwwutils.SuperUserMixin, AirflowModelView):
         return fk is None
 
     @classmethod
-    def is_secure(self):
+    def is_secure(cls):
         """
         Used to display a message in the Connection list view making it clear
         that the passwords and `extra` field can't be encrypted.