Posted to commits@airflow.apache.org by po...@apache.org on 2020/08/22 08:55:47 UTC
[airflow] branch master updated: Replace assignment with augmented assignment (#10468)
This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/master by this push:
new 7c206a8 Replace assignment with augmented assignment (#10468)
7c206a8 is described below
commit 7c206a82a6f074abcc4898a005ecd2c84a920054
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Sat Aug 22 09:54:57 2020 +0100
Replace assignment with augmented assignment (#10468)
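Augmented assignment is the in-place spelling of a read-modify-write:
x += y replaces x = x + y and avoids repeating the target name. For the
counters, timers, and strings touched in this patch the two forms compute
the same value. A minimal sketch of the equivalence (the variable names
are illustrative, not taken from the patch):

    before = 0
    after = 0
    before = before + 1   # plain assignment, the form this patch removes
    after += 1            # augmented assignment, the form this patch adds
    assert before == after == 1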
---
airflow/providers/amazon/aws/hooks/datasync.py                        | 2 +-
airflow/providers/amazon/aws/hooks/logs.py                            | 2 +-
airflow/providers/amazon/aws/hooks/sagemaker.py                       | 4 ++--
airflow/providers/apache/druid/hooks/druid.py                         | 2 +-
airflow/providers/apache/hive/hooks/hive.py                           | 2 +-
airflow/providers/apache/spark/hooks/spark_submit.py                  | 2 +-
airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py         | 4 ++--
airflow/providers/google/cloud/hooks/bigquery.py                      | 4 ++--
airflow/providers/jenkins/example_dags/example_jenkins_job_trigger.py | 2 +-
airflow/providers/jenkins/operators/jenkins_job_trigger.py            | 2 +-
airflow/providers/oracle/transfers/oracle_to_oracle.py                | 2 +-
airflow/providers/singularity/operators/singularity.py                | 4 ++--
12 files changed, 16 insertions(+), 16 deletions(-)
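One subtlety worth noting before the diff: the two spellings are not always
interchangeable for mutable objects. For a list, a = a + b builds a new list
and rebinds the name, while a += b extends the existing list in place (it
calls list.__iadd__, much like a.extend(b)), so any other reference to the
same list observes the change. A hedged illustration, not code from the
patch:

    options = ['--cleanenv']
    alias = options                          # second reference, same list
    options = options + ['--bind', '/tmp']  # new list; alias is unchanged
    assert alias == ['--cleanenv']

    options = ['--cleanenv']
    alias = options
    options += ['--bind', '/tmp']  # in-place extend; alias sees the change
    assert alias == ['--cleanenv', '--bind', '/tmp']

The list updates changed below in bigquery.py and singularity.py behave
identically in either spelling unless the target list happens to be aliased
elsewhere.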
diff --git a/airflow/providers/amazon/aws/hooks/datasync.py b/airflow/providers/amazon/aws/hooks/datasync.py
index b9a786e..153a75f 100644
--- a/airflow/providers/amazon/aws/hooks/datasync.py
+++ b/airflow/providers/amazon/aws/hooks/datasync.py
@@ -303,7 +303,7 @@ class AWSDataSyncHook(AwsBaseHook):
)
status = task_execution["Status"]
self.log.info("status=%s", status)
- iterations = iterations - 1
+ iterations -= 1
if status in self.TASK_EXECUTION_FAILURE_STATES:
break
if status in self.TASK_EXECUTION_SUCCESS_STATES:
diff --git a/airflow/providers/amazon/aws/hooks/logs.py b/airflow/providers/amazon/aws/hooks/logs.py
index 9e3d02a..f8c536a 100644
--- a/airflow/providers/amazon/aws/hooks/logs.py
+++ b/airflow/providers/amazon/aws/hooks/logs.py
@@ -84,7 +84,7 @@ class AwsLogsHook(AwsBaseHook):
events = events[skip:]
skip = 0
else:
- skip = skip - event_count
+ skip -= event_count
events = []
yield from events
diff --git a/airflow/providers/amazon/aws/hooks/sagemaker.py b/airflow/providers/amazon/aws/hooks/sagemaker.py
index 8aefa29..bb65a55 100644
--- a/airflow/providers/amazon/aws/hooks/sagemaker.py
+++ b/airflow/providers/amazon/aws/hooks/sagemaker.py
@@ -672,7 +672,7 @@ class SageMakerHook(AwsBaseHook): # pylint: disable=too-many-public-methods
while running:
time.sleep(check_interval)
- sec = sec + check_interval
+ sec += check_interval
try:
response = describe_function(job_name)
@@ -761,7 +761,7 @@ class SageMakerHook(AwsBaseHook): # pylint: disable=too-many-public-methods
while True:
time.sleep(check_interval)
- sec = sec + check_interval
+ sec += check_interval
state, last_description, last_describe_job_call = \
self.describe_training_job_with_log(job_name, positions, stream_names,
diff --git a/airflow/providers/apache/druid/hooks/druid.py b/airflow/providers/apache/druid/hooks/druid.py
index c55ad11..3dbc5b9 100644
--- a/airflow/providers/apache/druid/hooks/druid.py
+++ b/airflow/providers/apache/druid/hooks/druid.py
@@ -120,7 +120,7 @@ class DruidHook(BaseHook):
time.sleep(self.timeout)
- sec = sec + self.timeout
+ sec += self.timeout
status = req_status.json()['status']['status']
if status == 'RUNNING':
diff --git a/airflow/providers/apache/hive/hooks/hive.py b/airflow/providers/apache/hive/hooks/hive.py
index 0bee2a7..8e577a3 100644
--- a/airflow/providers/apache/hive/hooks/hive.py
+++ b/airflow/providers/apache/hive/hooks/hive.py
@@ -211,7 +211,7 @@ class HiveCliHook(BaseHook):
with TemporaryDirectory(prefix='airflow_hiveop_') as tmp_dir:
with NamedTemporaryFile(dir=tmp_dir) as f:
- hql = hql + '\n'
+ hql += '\n'
f.write(hql.encode('UTF-8'))
f.flush()
hive_cmd = self._prepare_cli_cmd()
diff --git a/airflow/providers/apache/spark/hooks/spark_submit.py b/airflow/providers/apache/spark/hooks/spark_submit.py
index 285dfac..24f7fc9 100644
--- a/airflow/providers/apache/spark/hooks/spark_submit.py
+++ b/airflow/providers/apache/spark/hooks/spark_submit.py
@@ -587,7 +587,7 @@ class SparkSubmitHook(BaseHook, LoggingMixin):
if returncode:
if missed_job_status_reports < max_missed_job_status_reports:
- missed_job_status_reports = missed_job_status_reports + 1
+ missed_job_status_reports += 1
else:
raise AirflowException(
"Failed to poll for the driver status {} times: returncode = {}"
diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
index 01017cd..2f13b88 100644
--- a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
+++ b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
@@ -317,11 +317,11 @@ class KubernetesPodOperator(BaseOperator): # pylint: disable=too-many-instance-
log_line = "found a running pod with labels {} but a different try_number.".format(labels)
if self.reattach_on_restart:
- log_line = log_line + " Will attach to this pod and monitor instead of starting new one"
+ log_line += " Will attach to this pod and monitor instead of starting new one"
self.log.info(log_line)
final_state, result = self.monitor_launched_pod(launcher, pod_list.items[0])
else:
- log_line = log_line + "creating pod with labels {} and launcher {}".format(labels, launcher)
+ log_line += "creating pod with labels {} and launcher {}".format(labels, launcher)
self.log.info(log_line)
final_state, _, result = self.create_new_pod_for_operator(labels, launcher)
return final_state, result
diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py
index 6f69ff5..e37e2ef 100644
--- a/airflow/providers/google/cloud/hooks/bigquery.py
+++ b/airflow/providers/google/cloud/hooks/bigquery.py
@@ -1165,7 +1165,7 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
'Granting table %s:%s.%s authorized view access to %s:%s dataset.',
view_project, view_dataset, view_table, project_id, source_dataset
)
- dataset.access_entries = dataset.access_entries + [view_access]
+ dataset.access_entries += [view_access]
dataset = self.update_dataset(
fields=["access"],
dataset_resource=dataset.to_api_repr(),
@@ -1423,7 +1423,7 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
job_complete = False
while polling_attempts < max_polling_attempts and not job_complete:
- polling_attempts = polling_attempts + 1
+ polling_attempts += 1
job_complete = self.poll_job_complete(job_id)
if job_complete:
self.log.info('Job successfully canceled: %s, %s', project_id, job_id)
diff --git a/airflow/providers/jenkins/example_dags/example_jenkins_job_trigger.py b/airflow/providers/jenkins/example_dags/example_jenkins_job_trigger.py
index 5a711b2..6dc5035 100644
--- a/airflow/providers/jenkins/example_dags/example_jenkins_job_trigger.py
+++ b/airflow/providers/jenkins/example_dags/example_jenkins_job_trigger.py
@@ -61,7 +61,7 @@ with DAG(
# The JenkinsJobTriggerOperator store the job url in the xcom variable corresponding to the task
# You can then use it to access things or to get the job number
# This url looks like : http://jenkins_url/job/job_name/job_number/
- url = url + "artifact/myartifact.xml" # Or any other artifact name
+ url += "artifact/myartifact.xml" # Or any other artifact name
request = Request(url)
response = jenkins_server.jenkins_open(request)
return response # We store the artifact content in a xcom variable for later use
diff --git a/airflow/providers/jenkins/operators/jenkins_job_trigger.py b/airflow/providers/jenkins/operators/jenkins_job_trigger.py
index 471abd4..9e38ede 100644
--- a/airflow/providers/jenkins/operators/jenkins_job_trigger.py
+++ b/airflow/providers/jenkins/operators/jenkins_job_trigger.py
@@ -160,7 +160,7 @@ class JenkinsJobTriggerOperator(BaseOperator):
:return: The build_number corresponding to the triggered job
"""
try_count = 0
- location = location + '/api/json'
+ location += '/api/json'
# TODO Use get_queue_info instead
# once it will be available in python-jenkins (v > 0.4.15)
self.log.info('Polling jenkins queue at the url %s', location)
diff --git a/airflow/providers/oracle/transfers/oracle_to_oracle.py b/airflow/providers/oracle/transfers/oracle_to_oracle.py
index 64dbe7a..64ed4c7 100644
--- a/airflow/providers/oracle/transfers/oracle_to_oracle.py
+++ b/airflow/providers/oracle/transfers/oracle_to_oracle.py
@@ -75,7 +75,7 @@ class OracleToOracleOperator(BaseOperator):
rows_total = 0
rows = cursor.fetchmany(self.rows_chunk)
while len(rows) > 0:
- rows_total = rows_total + len(rows)
+ rows_total += len(rows)
dest_hook.bulk_insert_rows(self.destination_table, rows,
target_fields=target_fields,
commit_every=self.rows_chunk)
diff --git a/airflow/providers/singularity/operators/singularity.py b/airflow/providers/singularity/operators/singularity.py
index 759398a..17d1749 100644
--- a/airflow/providers/singularity/operators/singularity.py
+++ b/airflow/providers/singularity/operators/singularity.py
@@ -118,11 +118,11 @@ class SingularityOperator(BaseOperator):
# Prepare list of binds
for bind in self.volumes:
- self.options = self.options + ['--bind', bind]
+ self.options += ['--bind', bind]
# Does the user want a custom working directory?
if self.working_dir is not None:
- self.options = self.options + ['--workdir', self.working_dir]
+ self.options += ['--workdir', self.working_dir]
# Export environment before instance is run
for enkey, envar in self.environment.items():