Posted to commits@airflow.apache.org by di...@apache.org on 2020/11/25 22:37:49 UTC

[airflow] branch fix-resource-backcompat created (now 84eecf9)

This is an automated email from the ASF dual-hosted git repository.

dimberman pushed a change to branch fix-resource-backcompat
in repository https://gitbox.apache.org/repos/asf/airflow.git.


      at 84eecf9  Fix issue with empty Resources in executor_config

This branch includes the following new commits:

     new 483386a  Sync FAB Permissions for all base views (#12162)
     new 21c08b8  Add 1.10.13 Changelog
     new 0aeb51f  Show Generic Error for Charts & Query View in old UI (#12495)
     new e2fe0b7  TimeSensor should respect the default_timezone config (#9699)
     new bbcbb71  TimeSensor should respect DAG timezone (#9882)
     new 5fac1e1  merge invalid_kubernetes_config rule and pod_template_file rule
     new 84eecf9  Fix issue with empty Resources in executor_config

The 7 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[airflow] 06/07: merge invalid_kubernetes_config rule and pod_template_file rule

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimberman pushed a commit to branch fix-resource-backcompat
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 5fac1e15c702faa9606c22390bf9b143193dfc79
Author: Daniel Imberman <da...@gmail.com>
AuthorDate: Fri Nov 20 07:19:10 2020 -0800

    merge invalid_kubernetes_config rule and pod_template_file rule
---
 .../upgrade/rules/invalid_kubernetes_configs.py    | 69 ----------------------
 airflow/upgrade/rules/pod_template_file_rule.py    | 38 +++++++++++-
 2 files changed, 37 insertions(+), 70 deletions(-)

diff --git a/airflow/upgrade/rules/invalid_kubernetes_configs.py b/airflow/upgrade/rules/invalid_kubernetes_configs.py
deleted file mode 100644
index 0850501..0000000
--- a/airflow/upgrade/rules/invalid_kubernetes_configs.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from __future__ import absolute_import
-
-from airflow.upgrade.rules.base_rule import BaseRule
-from airflow.configuration import conf
-
-invalid_keys = {"airflow_configmap",
-                "airflow_local_settings_configmap",
-                "dags_in_image",
-                "dags_volume_subpath",
-                "dags_volume_mount_point", "dags_volume_claim",
-                "logs_volume_subpath", "logs_volume_claim",
-                "dags_volume_host", "logs_volume_host",
-                "env_from_configmap_ref", "env_from_secret_ref", "git_repo",
-                "git_branch", "git_sync_depth", "git_subpath",
-                "git_sync_rev", "git_user", "git_password",
-                "git_sync_root", "git_sync_dest",
-                "git_dags_folder_mount_point", "git_ssh_key_secret_name",
-                "git_ssh_known_hosts_configmap_name", "git_sync_credentials_secret",
-                "git_sync_container_repository",
-                "git_sync_container_tag", "git_sync_init_container_name",
-                "git_sync_run_as_user",
-                "worker_service_account_name", "image_pull_secrets",
-                "gcp_service_account_keys", "affinity",
-                "tolerations", "run_as_user", "fs_group"}
-
-
-class InvalidKubernetesConfigRule(BaseRule):
-
-    title = "Users must delete deprecated configs for KubernetesExecutor"
-
-    description = """\
-In Airflow 2.0, KubernetesExecutor Users need to set a pod_template_file as a base
-value for all pods launched by the KubernetesExecutor. Many Kubernetes configs are no longer
-needed once this pod_template_file has been generated.
-"""
-
-    def check(self):
-        conf_dict = conf.as_dict(display_sensitive=True)
-        kube_conf = conf_dict['kubernetes']
-        keys = kube_conf.keys()
-        resp = [k for k in keys if k in invalid_keys]
-        if conf_dict['kubernetes_labels']:
-            resp.append("kubernetes_labels")
-        if conf_dict['kubernetes_secrets']:
-            resp.append("kubernetes_secrets")
-
-        if resp:
-            resp_string = "\n".join(resp)
-            return "The following invalid keys were found in your airflow.cfg: \
-                   \n\n{resp_string}\n\n \
-                   Please generate a pod_template_file by running `airflow generate_pod_template` \
-                   and delete these keys.".format(resp_string=resp_string)
diff --git a/airflow/upgrade/rules/pod_template_file_rule.py b/airflow/upgrade/rules/pod_template_file_rule.py
index 21363d3..5b9c795 100644
--- a/airflow/upgrade/rules/pod_template_file_rule.py
+++ b/airflow/upgrade/rules/pod_template_file_rule.py
@@ -20,6 +20,25 @@ from __future__ import absolute_import
 from airflow.upgrade.rules.base_rule import BaseRule
 from airflow.configuration import conf
 
+invalid_config_keys = {"airflow_configmap",
+                "airflow_local_settings_configmap",
+                "dags_in_image",
+                "dags_volume_subpath",
+                "dags_volume_mount_point", "dags_volume_claim",
+                "logs_volume_subpath", "logs_volume_claim",
+                "dags_volume_host", "logs_volume_host",
+                "env_from_configmap_ref", "env_from_secret_ref", "git_repo",
+                "git_branch", "git_sync_depth", "git_subpath",
+                "git_sync_rev", "git_user", "git_password",
+                "git_sync_root", "git_sync_dest",
+                "git_dags_folder_mount_point", "git_ssh_key_secret_name",
+                "git_ssh_known_hosts_configmap_name", "git_sync_credentials_secret",
+                "git_sync_container_repository",
+                "git_sync_container_tag", "git_sync_init_container_name",
+                "git_sync_run_as_user",
+                "worker_service_account_name", "image_pull_secrets",
+                "gcp_service_account_keys", "affinity",
+                "tolerations", "run_as_user", "fs_group"}
 
 class PodTemplateFileRule(BaseRule):
 
@@ -27,7 +46,8 @@ class PodTemplateFileRule(BaseRule):
 
     description = """\
 In Airflow 2.0, KubernetesExecutor Users need to set a pod_template_file as a base
-value for all pods launched by the KubernetesExecutor
+value for all pods launched by the KubernetesExecutor. Many Kubernetes configs are no longer
+needed once this pod_template_file has been generated.
 """
 
     def check(self):
@@ -37,3 +57,19 @@ value for all pods launched by the KubernetesExecutor
                 "Please create a pod_template_file by running `airflow generate_pod_template`.\n"
                 "This will generate a pod using your aiflow.cfg settings"
             )
+
+        conf_dict = conf.as_dict(display_sensitive=True)
+        kube_conf = conf_dict['kubernetes']
+        keys = kube_conf.keys()
+        resp = [k for k in keys if k in invalid_config_keys]
+        if conf_dict['kubernetes_labels']:
+            resp.append("kubernetes_labels")
+        if conf_dict['kubernetes_secrets']:
+            resp.append("kubernetes_secrets")
+
+        if resp:
+            resp_string = "\n".join(resp)
+            return "The following invalid keys were found in your airflow.cfg: \
+                   \n\n{resp_string}\n\n \
+                   Now that you have a pod_template_file, these keys no longer do anything.\n\
+                   Please delete these keys.".format(resp_string=resp_string)
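
To make the merged rule's behavior concrete, here is a minimal, hedged sketch of the membership check it performs, using a small stand-in config dict (the keys and values below are hypothetical and abridged; the real rule reads the [kubernetes] section of airflow.cfg through airflow.configuration.conf):

```python
# Sketch only: a stand-in for the [kubernetes] section of airflow.cfg.
# The real rule obtains this via conf.as_dict(display_sensitive=True).
invalid_config_keys = {"git_repo", "git_branch", "dags_volume_claim", "affinity"}  # abridged

kube_conf = {
    "pod_template_file": "/opt/airflow/pod_template.yaml",
    "git_repo": "https://github.com/example/dags.git",  # deprecated key, hypothetical value
    "worker_container_repository": "apache/airflow",
}

# The same membership test PodTemplateFileRule.check() runs above.
found = [key for key in kube_conf if key in invalid_config_keys]
if found:
    print("The following invalid keys were found in your airflow.cfg:\n")
    print("\n".join(found))
    print("\nNow that you have a pod_template_file, these keys no longer do anything.")
```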


[airflow] 02/07: Add 1.10.13 Changelog

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimberman pushed a commit to branch fix-resource-backcompat
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 21c08b8642873623258b5a3142ec9dd5a96047a0
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Thu Nov 19 21:28:11 2020 +0000

    Add 1.10.13 Changelog
---
 CHANGELOG.txt | 101 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 101 insertions(+)

diff --git a/CHANGELOG.txt b/CHANGELOG.txt
index 4fb12de..92695fb 100644
--- a/CHANGELOG.txt
+++ b/CHANGELOG.txt
@@ -1,3 +1,103 @@
+Airflow 1.10.13, 2020-11-24
+----------------------------
+
+New Features
+""""""""""""
+
+- Add "already checked" to failed pods in K8sPodOperator (#11368)
+- Pass SQLAlchemy engine options to FAB based UI (#11395)
+- [AIRFLOW-4438] Add Gzip compression to S3_hook (#8571)
+- Add permission "extra_links" for Viewer role and above (#10719)
+- Add generate_yaml command to easily test KubernetesExecutor before deploying pods (#10677)
+- Add Secrets backend for Microsoft Azure Key Vault (#10898)
+
+Bug Fixes
+"""""""""
+
+- SkipMixin: Handle empty branches (#11120)
+- [AIRFLOW-5274] dag loading duration metric name too long (#5890)
+- Handle no Dagrun in DagrunIdDep (#8389) (#11343)
+- Fix Kubernetes Executor logs for long dag names (#10942)
+- Add on_kill support for the KubernetesPodOperator (#10666)
+- KubernetesPodOperator template fix (#10963)
+- Fix displaying of add serialized_dag table migration
+- Fix Start Date tooltip on DAGs page (#10637)
+- URL encode execution date in the Last Run link (#10595)
+- Fixes issue with affinity backcompat in Airflow 1.10
+- Fix KubernetesExecutor import in views.py
+- Fix issues with Gantt View (#12419)
+- Fix Entrypoint and _CMD config variables (#12411)
+- Fix operator field update for SerializedBaseOperator (#10924)
+- Limited cryptography to < 3.2 for python 2.7
+- Install cattr on Python 3.7 - Fix docs build on RTD (#12045)
+- Limit version of marshmallow-sqlalchemy
+- Pin `kubernetes` to a max version of 11.0.0 (#11974)
+- Use snakebite-py3 for HDFS dependency for Python3 (#12340)
+- Removes snakebite kerberos dependency (#10865)
+- Fix failing dependencies for FAB and Celery (#10828)
+- Fix pod_mutation_hook for 1.10.13 (#10850)
+- Fix formatting of Host information
+- Fix Logout Google Auth issue in Non-RBAC UI (#11890)
+- Add missing imports to app.py (#10650)
+
+Improvements
+""""""""""""
+
+- Add XCom.deserialize_value to Airflow 1.10.13 (#12328)
+- Mount airflow.cfg to pod_template_file (#12311)
+- All k8s object must comply with JSON Schema (#12003)
+- Validate airflow chart values.yaml & values.schema.json (#11990)
+- Pod template file uses custom env variable (#11480)
+- Bump attrs and cattrs dependencies (#11969)
+- Bump attrs to > 20.0 (#11799)
+- [AIRFLOW-3607] Only query DB once per DAG run for TriggerRuleDep (#4751)
+- Rename task with duplicate task_id
+- Manage Flask AppBuilder Tables using Alembic Migrations (#12352)
+- ``airflow test`` only works for tasks in 1.10, not whole dags (#11191)
+- Improve warning messaging for duplicate task_ids in a DAG (#11126)
+- Pins moto to 1.3.14 (#10986)
+- DbApiHook: Support kwargs in get_pandas_df (#9730)
+- Make grace_period_seconds option on K8sPodOperator (#10727)
+- Fix syntax error in Dockerfile 'maintainer' Label (#10899)
+- The entrypoints in Docker Image should be owned by Airflow (#10853)
+- Make dockerfiles Google Shell Guide Compliant (#10734)
+- clean-logs script for Dockerfile: trim logs before sleep (#10685)
+- When sending tasks to celery from a sub-process, reset signal handlers (#11278)
+- SkipMixin: Add missing session.commit() and test (#10421)
+- Webserver: Further Sanitize values passed to origin param (#12459)
+- Security upgrade lodash from 4.17.19 to 4.17.20 (#11095)
+- Log instead of raise an Error for unregistered OperatorLinks (#11959)
+- Mask Password in Log table when using the CLI (#11468)
+- [AIRFLOW-3607] Optimize dep checking when depends on past set and concurrency limit
+- Execute job cancel HTTPRequest in Dataproc Hook (#10361)
+- Use rst lexer to format airflow upgrade check output (#11259)
+- Remove deprecation warning from contrib/kubernetes/pod.py
+- adding body as templated field for CloudSqlImportOperator (#10510)
+- Deprecate importing Hooks from plugin-created module (#12133)
+- Deprecate adding Operators and Sensors via plugins (#12069)
+
+Doc only changes
+""""""""""""""""
+
+- [Doc] Correct description for macro task_instance_key_str (#11062)
+- Checks if all the libraries in setup.py are listed in installation.rst file (#12023)
+- Revise "Project Focus" copy (#12011)
+- Move Project focus and Principles higher in the README (#11973)
+- Remove archived link from README.md (#11945)
+- Update download url for Airflow Version (#11800)
+- Add Project URLs for PyPI page (#11801)
+- Move Backport Providers docs to our docsite (#11136)
+- Refactor rebase copy (#11030)
+- Add missing images for kubernetes executor docs (#11083)
+- Fix indentation in executor_config example (#10467)
+- Enhanced the Kubernetes Executor doc  (#10433)
+- Refactor content to a markdown table (#10863)
+- Rename "Beyond the Horizon" section and refactor content (#10802)
+- Refactor official source section to use bullets (#10801)
+- Add section for official source code (#10678)
+- Add redbubble link to Airflow merchandise (#10359)
+- README Doc: Link to Airflow directory in ASF Directory (#11137)
+
 Airflow 1.10.12, 2020-08-25
 ----------------------------
 
@@ -36,6 +136,7 @@ Bug Fixes
 - BugFix: K8s Executor Multinamespace mode is evaluated to true by default (#10410)
 - Make KubernetesExecutor recognize kubernetes_labels (#10412)
 - Fix broken Kubernetes PodRuntimeInfoEnv (#10478)
+- Sync FAB Permissions for all base views (#12162)
 
 Improvements
 """"""""""""


[airflow] 04/07: TimeSensor should respect the default_timezone config (#9699)

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimberman pushed a commit to branch fix-resource-backcompat
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit e2fe0b7a7101b24dd3354dc13f5dd1fdac9c0fec
Author: zikun <33...@users.noreply.github.com>
AuthorDate: Sun Jul 19 01:36:28 2020 +0800

    TimeSensor should respect the default_timezone config (#9699)
    
    (cherry picked from commit b34ba874452809495354f3012e0b1dcbf4209e09)
---
 UPDATING.md                    | 7 +++++++
 airflow/sensors/time_sensor.py | 2 +-
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/UPDATING.md b/UPDATING.md
index 2057491..faa75fd 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -63,6 +63,13 @@ https://developers.google.com/style/inclusive-documentation
 -->
 ## Airflow 1.10.13
 
+### TimeSensor will consider default_timezone setting.
+
+Previously `TimeSensor` always compared the `target_time` with the current time in UTC.
+
+Now it will compare `target_time` with the current time in the timezone set by `default_timezone` under the `core`
+section of the config.
+
 ### Removed Kerberos support for HDFS hook
 
 The HDFS hook's Kerberos support has been removed due to removed python-krbV dependency from PyPI
diff --git a/airflow/sensors/time_sensor.py b/airflow/sensors/time_sensor.py
index 0c39235..d26c32d 100644
--- a/airflow/sensors/time_sensor.py
+++ b/airflow/sensors/time_sensor.py
@@ -37,4 +37,4 @@ class TimeSensor(BaseSensorOperator):
 
     def poke(self, context):
         self.log.info('Checking if the time (%s) has come', self.target_time)
-        return timezone.utcnow().time() > self.target_time
+        return timezone.make_naive(timezone.utcnow()).time() > self.target_time
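
As a hedged illustration of the new behavior, assuming a hypothetical `[core] default_timezone = Asia/Singapore` setting: `timezone.make_naive()` converts the UTC instant to local wall time before the comparison, which boils down to an `astimezone()` call plus dropping tzinfo.

```python
# Sketch of the new comparison, assuming default_timezone = Asia/Singapore (UTC+08:00).
from datetime import datetime, time, timezone as dt_tz

import pendulum

utc_now = datetime(2020, 1, 1, 23, 0, tzinfo=dt_tz.utc)
local_wall_time = utc_now.astimezone(pendulum.timezone("Asia/Singapore")).time()

print(local_wall_time)                 # 07:00 the next day, local wall time
print(local_wall_time > time(10, 0))   # False; the old UTC comparison gave True (23:00 > 10:00)
```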


[airflow] 07/07: Fix issue with empty Resources in executor_config

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimberman pushed a commit to branch fix-resource-backcompat
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 84eecf94bab1a8c66b5161f03c6631448fb4850e
Author: Daniel Imberman <da...@gmail.com>
AuthorDate: Wed Nov 25 14:33:51 2020 -0800

    Fix issue with empty Resources in executor_config
    
    Fixes an issue where, if a user specifies a request but not a limit in
    resources for the executor_config, the backwards-compatibility layer
    cannot parse the resources.
---
 airflow/contrib/kubernetes/pod.py   |   4 +-
 tests/kubernetes/models/test_pod.py | 240 ++++++++++++++++++++++--------------
 2 files changed, 152 insertions(+), 92 deletions(-)

diff --git a/airflow/contrib/kubernetes/pod.py b/airflow/contrib/kubernetes/pod.py
index d1f30a8..7e38147 100644
--- a/airflow/contrib/kubernetes/pod.py
+++ b/airflow/contrib/kubernetes/pod.py
@@ -250,8 +250,8 @@ def _extract_ports(ports):
 
 def _extract_resources(resources):
     if isinstance(resources, k8s.V1ResourceRequirements):
-        requests = resources.requests
-        limits = resources.limits
+        requests = resources.requests or {}
+        limits = resources.limits or {}
         return Resources(
             request_memory=requests.get('memory', None),
             request_cpu=requests.get('cpu', None),
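
The failure mode being fixed: the kubernetes client leaves `requests` and `limits` as None when unset, so the old code called `.get()` on None. A small, self-contained sketch of the guard, using the kubernetes client models directly:

```python
# Sketch of the fix: with only requests set, resources.limits is None,
# and the old `limits.get('memory')` raised AttributeError.
import kubernetes.client.models as k8s

resources = k8s.V1ResourceRequirements(requests={"memory": "1G"})  # no limits given

requests = resources.requests or {}   # {'memory': '1G'}
limits = resources.limits or {}       # None coerced to an empty dict

print(requests.get("memory"))  # 1G
print(limits.get("memory"))    # None, instead of an AttributeError
```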
diff --git a/tests/kubernetes/models/test_pod.py b/tests/kubernetes/models/test_pod.py
index f8df28a..6939597 100644
--- a/tests/kubernetes/models/test_pod.py
+++ b/tests/kubernetes/models/test_pod.py
@@ -19,63 +19,88 @@ from tests.compat import mock
 from kubernetes.client import ApiClient
 import kubernetes.client.models as k8s
 from airflow.kubernetes.pod import Port
+from airflow.kubernetes.pod import Resources
+from airflow.contrib.kubernetes.pod import _extract_resources
 from airflow.kubernetes.pod_generator import PodGenerator
 from airflow.kubernetes.k8s_model import append_to_pod
 
 
 class TestPod(unittest.TestCase):
+    def test_extract_resources(self):
+        res = _extract_resources(k8s.V1ResourceRequirements())
+        self.assertEqual(
+            res.to_k8s_client_obj().to_dict(), Resources().to_k8s_client_obj().to_dict()
+        )
+        res = _extract_resources(k8s.V1ResourceRequirements(limits={"memory": "1G"}))
+        self.assertEqual(
+            res.to_k8s_client_obj().to_dict(),
+            Resources(limit_memory="1G").to_k8s_client_obj().to_dict(),
+        )
+        res = _extract_resources(k8s.V1ResourceRequirements(requests={"memory": "1G"}))
+        self.assertEqual(
+            res.to_k8s_client_obj().to_dict(),
+            Resources(request_memory="1G").to_k8s_client_obj().to_dict(),
+        )
+        res = _extract_resources(
+            k8s.V1ResourceRequirements(
+                limits={"memory": "1G"}, requests={"memory": "1G"}
+            )
+        )
+        self.assertEqual(
+            res.to_k8s_client_obj().to_dict(),
+            Resources(limit_memory="1G", request_memory="1G")
+            .to_k8s_client_obj()
+            .to_dict(),
+        )
 
     def test_port_to_k8s_client_obj(self):
-        port = Port('http', 80)
+        port = Port("http", 80)
         self.assertEqual(
             port.to_k8s_client_obj(),
-            k8s.V1ContainerPort(
-                name='http',
-                container_port=80
-            )
+            k8s.V1ContainerPort(name="http", container_port=80),
         )
 
-    @mock.patch('uuid.uuid4')
+    @mock.patch("uuid.uuid4")
     def test_port_attach_to_pod(self, mock_uuid):
         import uuid
-        static_uuid = uuid.UUID('cf4a56d2-8101-4217-b027-2af6216feb48')
+
+        static_uuid = uuid.UUID("cf4a56d2-8101-4217-b027-2af6216feb48")
         mock_uuid.return_value = static_uuid
-        pod = PodGenerator(image='airflow-worker:latest', name='base').gen_pod()
-        ports = [
-            Port('https', 443),
-            Port('http', 80)
-        ]
+        pod = PodGenerator(image="airflow-worker:latest", name="base").gen_pod()
+        ports = [Port("https", 443), Port("http", 80)]
         k8s_client = ApiClient()
         result = append_to_pod(pod, ports)
         result = k8s_client.sanitize_for_serialization(result)
-        self.assertEqual({
-            'apiVersion': 'v1',
-            'kind': 'Pod',
-            'metadata': {'name': 'base-' + static_uuid.hex},
-            'spec': {
-                'containers': [{
-                    'args': [],
-                    'command': [],
-                    'env': [],
-                    'envFrom': [],
-                    'image': 'airflow-worker:latest',
-                    'name': 'base',
-                    'ports': [{
-                        'name': 'https',
-                        'containerPort': 443
-                    }, {
-                        'name': 'http',
-                        'containerPort': 80
-                    }],
-                    'volumeMounts': [],
-                }],
-                'hostNetwork': False,
-                'imagePullSecrets': [],
-                'volumes': []
-            }
-        }, result)
+        self.assertEqual(
+            {
+                "apiVersion": "v1",
+                "kind": "Pod",
+                "metadata": {"name": "base-" + static_uuid.hex},
+                "spec": {
+                    "containers": [
+                        {
+                            "args": [],
+                            "command": [],
+                            "env": [],
+                            "envFrom": [],
+                            "image": "airflow-worker:latest",
+                            "name": "base",
+                            "ports": [
+                                {"name": "https", "containerPort": 443},
+                                {"name": "http", "containerPort": 80},
+                            ],
+                            "volumeMounts": [],
+                        }
+                    ],
+                    "hostNetwork": False,
+                    "imagePullSecrets": [],
+                    "volumes": [],
+                },
+            },
+            result,
+        )
 
-    @mock.patch('uuid.uuid4')
+    @mock.patch("uuid.uuid4")
     def test_to_v1_pod(self, mock_uuid):
         from airflow.contrib.kubernetes.pod import Pod as DeprecatedPod
         from airflow.kubernetes.volume import Volume
@@ -83,7 +108,8 @@ class TestPod(unittest.TestCase):
         from airflow.kubernetes.secret import Secret
         from airflow.kubernetes.pod import Resources
         import uuid
-        static_uuid = uuid.UUID('cf4a56d2-8101-4217-b027-2af6216feb48')
+
+        static_uuid = uuid.UUID("cf4a56d2-8101-4217-b027-2af6216feb48")
         mock_uuid.return_value = static_uuid
 
         pod = DeprecatedPod(
@@ -94,24 +120,26 @@ class TestPod(unittest.TestCase):
             envs={"test_key": "test_value"},
             cmds=["airflow"],
             resources=Resources(
-                request_memory="1G",
-                request_cpu="100Mi",
-                limit_gpu="100G"
+                request_memory="1G", request_cpu="100Mi", limit_gpu="100G"
             ),
             init_containers=k8s.V1Container(
                 name="test-container",
-                volume_mounts=k8s.V1VolumeMount(mount_path="/foo/bar", name="init-volume-secret")
+                volume_mounts=k8s.V1VolumeMount(
+                    mount_path="/foo/bar", name="init-volume-secret"
+                ),
             ),
             volumes=[
                 Volume(name="foo", configs={}),
-                {"name": "bar", 'secret': {'secretName': 'volume-secret'}}
+                {"name": "bar", "secret": {"secretName": "volume-secret"}},
             ],
             secrets=[
                 Secret("volume", None, "init-volume-secret"),
-                Secret('env', "AIRFLOW_SECRET", 'secret_name', "airflow_config"),
-                Secret("volume", "/opt/airflow", "volume-secret", "secret-key")
+                Secret("env", "AIRFLOW_SECRET", "secret_name", "airflow_config"),
+                Secret("volume", "/opt/airflow", "volume-secret", "secret-key"),
+            ],
+            volume_mounts=[
+                VolumeMount(name="foo", mount_path="/mnt", sub_path="/", read_only=True)
             ],
-            volume_mounts=[VolumeMount(name="foo", mount_path="/mnt", sub_path="/", read_only=True)]
         )
 
         k8s_client = ApiClient()
@@ -119,47 +147,79 @@ class TestPod(unittest.TestCase):
         result = pod.to_v1_kubernetes_pod()
         result = k8s_client.sanitize_for_serialization(result)
 
-        expected = \
-            {'metadata': {'annotations': {},
-                          'labels': {},
-                          'name': 'bar',
-                          'namespace': 'baz'},
-             'spec': {'affinity': {},
-                      'containers': [{'args': [],
-                                      'command': ['airflow'],
-                                      'env': [{'name': 'test_key', 'value': 'test_value'},
-                                              {'name': 'AIRFLOW_SECRET',
-                                               'valueFrom': {'secretKeyRef': {'key': 'airflow_config',
-                                                                              'name': 'secret_name'}}}],
-                                      'envFrom': [],
-                                      'image': 'foo',
-                                      'imagePullPolicy': 'Never',
-                                      'name': 'base',
-                                      'resources': {'limits': {'nvidia.com/gpu': '100G'},
-                                                    'requests': {'cpu': '100Mi',
-                                                                 'memory': '1G'}},
-                                      'volumeMounts': [{'mountPath': '/mnt',
-                                                        'name': 'foo',
-                                                        'readOnly': True,
-                                                        'subPath': '/'},
-                                                       {'mountPath': '/opt/airflow',
-                                                        'name': 'secretvol' + str(static_uuid),
-                                                        'readOnly': True}]}],
-                      'hostNetwork': False,
-                      'imagePullSecrets': [],
-                      'initContainers': {'name': 'test-container',
-                                         'volumeMounts': {'mountPath': '/foo/bar',
-                                                          'name': 'init-volume-secret'}},
-                      'nodeSelector': {},
-                      'securityContext': {},
-                      'tolerations': [],
-                      'volumes': [{'name': 'foo'},
-                                  {'name': 'bar',
-                                   'secret': {'secretName': 'volume-secret'}},
-                                  {'name': 'secretvol' + str(static_uuid),
-                                   'secret': {'secretName': 'init-volume-secret'}},
-                                  {'name': 'secretvol' + str(static_uuid),
-                                   'secret': {'secretName': 'volume-secret'}}
-                                  ]}}
+        expected = {
+            "metadata": {
+                "annotations": {},
+                "labels": {},
+                "name": "bar",
+                "namespace": "baz",
+            },
+            "spec": {
+                "affinity": {},
+                "containers": [
+                    {
+                        "args": [],
+                        "command": ["airflow"],
+                        "env": [
+                            {"name": "test_key", "value": "test_value"},
+                            {
+                                "name": "AIRFLOW_SECRET",
+                                "valueFrom": {
+                                    "secretKeyRef": {
+                                        "key": "airflow_config",
+                                        "name": "secret_name",
+                                    }
+                                },
+                            },
+                        ],
+                        "envFrom": [],
+                        "image": "foo",
+                        "imagePullPolicy": "Never",
+                        "name": "base",
+                        "resources": {
+                            "limits": {"nvidia.com/gpu": "100G"},
+                            "requests": {"cpu": "100Mi", "memory": "1G"},
+                        },
+                        "volumeMounts": [
+                            {
+                                "mountPath": "/mnt",
+                                "name": "foo",
+                                "readOnly": True,
+                                "subPath": "/",
+                            },
+                            {
+                                "mountPath": "/opt/airflow",
+                                "name": "secretvol" + str(static_uuid),
+                                "readOnly": True,
+                            },
+                        ],
+                    }
+                ],
+                "hostNetwork": False,
+                "imagePullSecrets": [],
+                "initContainers": {
+                    "name": "test-container",
+                    "volumeMounts": {
+                        "mountPath": "/foo/bar",
+                        "name": "init-volume-secret",
+                    },
+                },
+                "nodeSelector": {},
+                "securityContext": {},
+                "tolerations": [],
+                "volumes": [
+                    {"name": "foo"},
+                    {"name": "bar", "secret": {"secretName": "volume-secret"}},
+                    {
+                        "name": "secretvol" + str(static_uuid),
+                        "secret": {"secretName": "init-volume-secret"},
+                    },
+                    {
+                        "name": "secretvol" + str(static_uuid),
+                        "secret": {"secretName": "volume-secret"},
+                    },
+                ],
+            },
+        }
         self.maxDiff = None
         self.assertEqual(expected, result)


[airflow] 01/07: Sync FAB Permissions for all base views (#12162)

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimberman pushed a commit to branch fix-resource-backcompat
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 483386af1abbaa5a2a122f0976aa5e2262e18d21
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Sat Nov 7 17:32:53 2020 +0000

    Sync FAB Permissions for all base views (#12162)
    
    If a user has set `[webserver] update_fab_perms = False` and runs the `airflow sync-perm` command to sync all permissions, they will receive the following error:
    
    ```
    webserver_1  | [2020-11-07 15:13:07,431] {decorators.py:113} WARNING - Access is Denied for: can_index on: Airflow
    ```
    
    If the user was created earlier and some permissions were already synced, the user won't be able to find the Security menu & Configurations view.
---
 airflow/bin/cli.py      | 1 +
 tests/core/test_core.py | 2 ++
 2 files changed, 3 insertions(+)

diff --git a/airflow/bin/cli.py b/airflow/bin/cli.py
index 1fb3b88..37f70c4 100644
--- a/airflow/bin/cli.py
+++ b/airflow/bin/cli.py
@@ -1863,6 +1863,7 @@ def sync_perm(args): # noqa
         appbuilder = cached_appbuilder()
         print('Updating permission, view-menu for all existing roles')
         appbuilder.sm.sync_roles()
+        appbuilder.add_permissions(update_perms=True)
         print('Updating permission on all DAG views')
         dags = DagBag(store_serialized_dags=settings.STORE_SERIALIZED_DAGS).dags.values()
         for dag in dags:
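
A runnable sketch of the patched flow, using a mock in place of the cached Flask-AppBuilder instance (as the new unit test below does, rather than building it via cached_appbuilder()):

```python
# Sketch of the patched sync_perm flow; `appbuilder` is mocked here.
from unittest import mock

appbuilder = mock.Mock()

appbuilder.sm.sync_roles()                     # existing call: sync role/view-menu perms
appbuilder.add_permissions(update_perms=True)  # new call: sync base-view permissions
                                               # even when update_fab_perms = False

appbuilder.add_permissions.assert_called_once_with(update_perms=True)
print("base view permissions would be re-synced")
```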
diff --git a/tests/core/test_core.py b/tests/core/test_core.py
index b229d53..bf74945 100644
--- a/tests/core/test_core.py
+++ b/tests/core/test_core.py
@@ -1110,6 +1110,7 @@ class CliTests(unittest.TestCase):
             DAG('no_access_control')
         ], dagbag_mock)
         self.appbuilder.sm = mock.Mock()
+        self.appbuilder.add_permissions = mock.Mock()
 
         args = self.parser.parse_args([
             'sync_perm'
@@ -1127,6 +1128,7 @@ class CliTests(unittest.TestCase):
             'no_access_control',
             None,
         )
+        self.appbuilder.add_permissions.assert_called_once_with(update_perms=True)
 
     def expect_dagbag_contains(self, dags, dagbag_mock):
         dagbag = mock.Mock()


[airflow] 03/07: Show Generic Error for Charts & Query View in old UI (#12495)

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimberman pushed a commit to branch fix-resource-backcompat
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 0aeb51fd5e68b3275481767756d7dbe974333b28
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Fri Nov 20 13:31:10 2020 +0000

    Show Generic Error for Charts & Query View in old UI (#12495)
---
 airflow/www/views.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/airflow/www/views.py b/airflow/www/views.py
index fdb674d..f11fb84 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -471,8 +471,9 @@ class Airflow(AirflowViewMixin, BaseView):
             df = hook.get_pandas_df(
                 wwwutils.limit_sql(sql, CHART_LIMIT, conn_type=db.conn_type))
             df = df.fillna(0)
-        except Exception as e:
-            payload['error'] += "SQL execution failed. Details: " + str(e)
+        except Exception:
+            log.exception("Chart SQL execution failed")
+            payload['error'] += "SQL execution failed. Contact your System Administrator for more details"
 
         if csv:
             return Response(
@@ -2398,8 +2399,9 @@ class QueryView(wwwutils.DataProfilingMixin, AirflowViewMixin, BaseView):
                     index=False,
                     na_rep='',
                 ) if has_data else ''
-            except Exception as e:
-                flash(str(e), 'error')
+            except Exception:
+                log.exception("Query SQL execution failed")
+                flash("SQL execution failed. Contact your System Administrator for more details", "error")
                 error = True
 
         if has_data and len(df) == QUERY_LIMIT:
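
The pattern in both hunks is the same: log the full traceback server-side and surface only a generic message to the user. A minimal, self-contained sketch (the query callable and message text are illustrative, not the exact views.py code):

```python
# Sketch of the log-and-sanitize pattern used above: details go to the
# server log; the user sees only a generic message.
import logging

logging.basicConfig()
log = logging.getLogger(__name__)

def safe_execute(run_query):
    try:
        return run_query(), None
    except Exception:
        log.exception("Query SQL execution failed")  # full traceback, log only
        return None, "SQL execution failed. Contact your System Administrator for more details"

# Hypothetical failing query:
result, error = safe_execute(lambda: 1 / 0)
print(error)
```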


[airflow] 05/07: TimeSensor should respect DAG timezone (#9882)

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimberman pushed a commit to branch fix-resource-backcompat
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit bbcbb711d20d966fd62beb6f52118eabbe4a27e0
Author: zikun <33...@users.noreply.github.com>
AuthorDate: Tue Jul 21 00:19:08 2020 +0800

    TimeSensor should respect DAG timezone (#9882)
    
    (cherry picked from commit 9c518fe937f8dc5e35908be96bd075c4ff666755)
---
 UPDATING.md                       |  6 ++---
 airflow/sensors/time_sensor.py    |  2 +-
 tests/sensors/test_time_sensor.py | 52 +++++++++++++++++++++++++++++++++++++++
 3 files changed, 56 insertions(+), 4 deletions(-)

diff --git a/UPDATING.md b/UPDATING.md
index faa75fd..e2285df 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -63,12 +63,12 @@ https://developers.google.com/style/inclusive-documentation
 -->
 ## Airflow 1.10.13
 
-### TimeSensor will consider default_timezone setting.
+### TimeSensor is now timezone aware
 
 Previously `TimeSensor` always compared the `target_time` with the current time in UTC.
 
-Now it will compare `target_time` with the current time in the timezone set by `default_timezone` under the `core`
-section of the config.
+Now it will compare `target_time` with the current time in the timezone of the DAG,
+defaulting to the `default_timezone` in the global config.
 
 ### Removed Kerberos support for HDFS hook
 
diff --git a/airflow/sensors/time_sensor.py b/airflow/sensors/time_sensor.py
index d26c32d..5c41c2c 100644
--- a/airflow/sensors/time_sensor.py
+++ b/airflow/sensors/time_sensor.py
@@ -37,4 +37,4 @@ class TimeSensor(BaseSensorOperator):
 
     def poke(self, context):
         self.log.info('Checking if the time (%s) has come', self.target_time)
-        return timezone.make_naive(timezone.utcnow()).time() > self.target_time
+        return timezone.make_naive(timezone.utcnow(), self.dag.timezone).time() > self.target_time
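
A hedged sketch of the DAG-aware comparison, matching the scenario in the new test below (an Asia/Singapore DAG and a 23:00 UTC instant): `make_naive(utcnow(), dag.timezone)` amounts to converting the UTC instant into the DAG's wall time.

```python
# Sketch of the DAG-timezone-aware check.
from datetime import datetime, time, timezone as dt_tz

import pendulum

dag_tz = pendulum.timezone("Asia/Singapore")             # UTC+08:00, as in the test below
utc_now = datetime(2020, 1, 1, 23, 0, tzinfo=dt_tz.utc)

wall_time = utc_now.astimezone(dag_tz).time()            # 07:00 the next day

print(wall_time > time(10, 0))  # False for this DAG; a UTC DAG sees 23:00 and returns True
```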
diff --git a/tests/sensors/test_time_sensor.py b/tests/sensors/test_time_sensor.py
new file mode 100644
index 0000000..c08bdd1
--- /dev/null
+++ b/tests/sensors/test_time_sensor.py
@@ -0,0 +1,52 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from datetime import datetime, time
+
+import pendulum
+from parameterized import parameterized
+
+from airflow.models.dag import DAG
+from airflow.sensors.time_sensor import TimeSensor
+from airflow.utils import timezone
+from tests.compat import patch
+
+DEFAULT_TIMEZONE = "Asia/Singapore"  # UTC+08:00
+DEFAULT_DATE_WO_TZ = datetime(2015, 1, 1)
+DEFAULT_DATE_WITH_TZ = datetime(
+    2015, 1, 1, tzinfo=pendulum.tz.timezone(DEFAULT_TIMEZONE)
+)
+
+
+@patch(
+    "airflow.sensors.time_sensor.timezone.utcnow",
+    return_value=timezone.datetime(2020, 1, 1, 23, 0).replace(tzinfo=timezone.utc),
+)
+class TestTimeSensor:
+    @parameterized.expand(
+        [
+            ("UTC", DEFAULT_DATE_WO_TZ, True),
+            ("UTC", DEFAULT_DATE_WITH_TZ, False),
+            (DEFAULT_TIMEZONE, DEFAULT_DATE_WO_TZ, False),
+        ]
+    )
+    def test_timezone(self, mock_utcnow, default_timezone, start_date, expected):
+        with patch("airflow.settings.TIMEZONE", pendulum.timezone(default_timezone)):
+            dag = DAG("test", default_args={"start_date": start_date})
+            op = TimeSensor(task_id="test", target_time=time(10, 0), dag=dag)
+            assert op.poke(None) == expected