Posted to commits@liminal.apache.org by av...@apache.org on 2021/11/14 10:43:56 UTC

[incubator-liminal] branch master updated: [LIMINAL-52]: upgrade to airflow 2.1.2

This is an automated email from the ASF dual-hosted git repository.

aviemzur pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-liminal.git


The following commit(s) were added to refs/heads/master by this push:
     new b633f82  [LIMINAL-52]: upgrade to airflow 2.1.2
b633f82 is described below

commit b633f825b0836bab44591d97f4f8c7c1a324368d
Author: Lidor Ettinger <li...@gmail.com>
AuthorDate: Sun Nov 14 12:42:59 2021 +0200

    [LIMINAL-52]: upgrade to airflow 2.1.2
---
 docs/getting-started/spark_app_demo.md             |   6 +-
 examples/spark-app-demo/k8s/serving.py             |   2 +-
 liminal/runners/airflow/executors/emr.py           |   4 +-
 liminal/runners/airflow/executors/kubernetes.py    |  23 ++-
 .../runners/airflow/operators/cloudformation.py    |  15 +-
 .../airflow/tasks/create_cloudformation_stack.py   |  10 +-
 requirements.txt                                   |  14 +-
 scripts/Dockerfile-airflow                         |   3 +-
 scripts/docker-compose.yml                         | 171 ++++++++++-----------
 scripts/liminal                                    |  15 +-
 scripts/requirements-airflow.txt                   |   5 +-
 scripts/webserver_config.py                        | 128 +++++++++++++++
 .../python/test_python_server_image_builder.py     |   2 +-
 tests/runners/airflow/executors/test_emr.py        |   2 +
 .../tasks/test_create_cloudformation_stack.py      |  14 +-
 15 files changed, 266 insertions(+), 148 deletions(-)

diff --git a/docs/getting-started/spark_app_demo.md b/docs/getting-started/spark_app_demo.md
index 0e27f0a..229f400 100644
--- a/docs/getting-started/spark_app_demo.md
+++ b/docs/getting-started/spark_app_demo.md
@@ -260,7 +260,7 @@ spec:
       volumeMounts:
         - mountPath: "/mnt/gettingstartedvol"
           name: task-pv-storage
-  EOF
+EOF
 ```
 
 Check that the service is running:
@@ -272,13 +272,13 @@ kubectl get pods --namespace=default
 Check that the service is up:
 
 ```BASH
-kubectl exec -it --namespace=default spark-app-demo-- /bin/bash -c "curl localhost/healthcheck"
+kubectl exec -it --namespace=default spark-app-demo -- /bin/bash -c "curl localhost/healthcheck"
 ```
 
 Check the prediction:
 
 ```BASH
-kubectl exec -it --namespace=default spark-app-demo-- /bin/bash -c "curl -X POST -d '{\"petal_width\": \"2.1\"}' localhost/predict"
+kubectl exec -it --namespace=default spark-app-demo -- /bin/bash -c "curl -X POST -d '{\"petal_width\": [1.1,1.1,1.1,1.1]}' localhost/predict"
 ```
 
 ## Debugging Kubernetes Deployments
diff --git a/examples/spark-app-demo/k8s/serving.py b/examples/spark-app-demo/k8s/serving.py
index 02ca58c..a616971 100644
--- a/examples/spark-app-demo/k8s/serving.py
+++ b/examples/spark-app-demo/k8s/serving.py
@@ -29,7 +29,7 @@ def predict(input_json):
     try:
         input_dict = json.loads(input_json)
         model, version = _MODEL_STORE.load_latest_model()
-        result = str(model.predict_proba([[float(input_dict[_PETAL_WIDTH])]])[0][1])
+        result = str(model.predict_proba([list(input_dict[_PETAL_WIDTH])])[0][1])
         return json.dumps({"result": result, "version": version})
 
     except IndexError:
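
The handler change above widens the input from a single `petal_width` scalar to a full feature vector. A quick sketch of the new payload shape and how it reaches the model (the four-value vector matches the iris example; the model call is illustrative):

```python
import json

# New request payload: a list of feature values instead of one float.
input_dict = json.loads('{"petal_width": [1.1, 1.1, 1.1, 1.1]}')
features = [list(input_dict['petal_width'])]  # -> [[1.1, 1.1, 1.1, 1.1]]
# model.predict_proba(features) now receives one 4-feature row, where the
# old code built a single-column [[float(petal_width)]] matrix.
```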
diff --git a/liminal/runners/airflow/executors/emr.py b/liminal/runners/airflow/executors/emr.py
index eab7e87..83354aa 100644
--- a/liminal/runners/airflow/executors/emr.py
+++ b/liminal/runners/airflow/executors/emr.py
@@ -16,8 +16,8 @@
 # specific language governing permissions and limitations
 # under the License.
 
-from airflow.contrib.operators.emr_add_steps_operator import EmrAddStepsOperator
-from airflow.contrib.sensors.emr_step_sensor import EmrStepSensor
+from airflow.providers.amazon.aws.operators.emr_add_steps import EmrAddStepsOperator
+from airflow.providers.amazon.aws.sensors.emr_step import EmrStepSensor
 
 from liminal.runners.airflow.model import executor
 from liminal.runners.airflow.tasks import hadoop
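
Airflow 2 split the old `contrib` modules into standalone provider distributions, so the new imports above only resolve once `apache-airflow-providers-amazon` (pinned below in requirements.txt) is installed. A minimal sketch of the new paths in use, with hypothetical DAG and job-flow ids:

```python
from datetime import datetime

from airflow import DAG
from airflow.providers.amazon.aws.operators.emr_add_steps import EmrAddStepsOperator
from airflow.providers.amazon.aws.sensors.emr_step import EmrStepSensor

with DAG('emr_example', start_date=datetime(2021, 1, 1), schedule_interval=None) as dag:
    add_steps = EmrAddStepsOperator(
        task_id='add_steps',
        job_flow_id='j-XXXXXXXXXXXXX',  # hypothetical cluster id
        steps=[{
            'Name': 'example-step',
            'ActionOnFailure': 'CONTINUE',
            'HadoopJarStep': {'Jar': 'command-runner.jar', 'Args': ['echo', 'ok']},
        }],
    )
    watch_step = EmrStepSensor(
        task_id='watch_step',
        job_flow_id='j-XXXXXXXXXXXXX',
        step_id="{{ task_instance.xcom_pull(task_ids='add_steps')[0] }}",
    )
    add_steps >> watch_step
```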
diff --git a/liminal/runners/airflow/executors/kubernetes.py b/liminal/runners/airflow/executors/kubernetes.py
index eb1c34c..be1e25e 100644
--- a/liminal/runners/airflow/executors/kubernetes.py
+++ b/liminal/runners/airflow/executors/kubernetes.py
@@ -21,9 +21,8 @@ import datetime
 import logging
 import os
 
-from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
-from airflow.kubernetes.volume import Volume
-from airflow.kubernetes.volume_mount import VolumeMount
+from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator
+from kubernetes.client import models as k8s, V1Volume, V1VolumeMount
 
 from liminal.core.util import env_util
 from liminal.runners.airflow.config.standalone_variable_backend import get_variable
@@ -74,12 +73,10 @@ class KubernetesPodExecutor(executor.Executor):
             claim_name = volume_config.get('claim_name')
             if not claim_name and 'local' in volume_config:
                 claim_name = f'{name}-pvc'
-            volume = Volume(
+            volume = V1Volume(
                 name=name,
-                configs={
-                    'persistentVolumeClaim': {
-                        'claimName': claim_name
-                    }
+                persistent_volume_claim={
+                    'claimName': claim_name
                 }
             )
             volumes.append(volume)
@@ -98,7 +95,7 @@ class KubernetesPodExecutor(executor.Executor):
             'get_logs': config.pop('get_logs', True),
             'is_delete_operator_pod': config.pop('is_delete_operator_pod', True),
             'startup_timeout_seconds': config.pop('startup_timeout_seconds', 1200),
-            'env_vars': task.env_vars,
+            'env_vars': [k8s.V1EnvVar(name=x, value=v) for x, v in task.env_vars.items()],
             'do_xcom_push': task.task_config.get('do_xcom_push', False),
             'image_pull_secrets': config.pop('image_pull_secrets', 'regcred'),
             'volumes': self.volumes,
@@ -106,10 +103,10 @@ class KubernetesPodExecutor(executor.Executor):
             'cluster_context': os.environ.get('AIRFLOW__KUBERNETES__CLUSTER_CONTEXT', None),
             'cmds': task.cmds,
             'volume_mounts': [
-                VolumeMount(mount['volume'],
-                            mount['path'],
-                            mount.get('sub_path'),
-                            mount.get('read_only', False))
+                V1VolumeMount(name=mount['volume'],
+                              mount_path=mount['path'],
+                              sub_path=mount.get('sub_path'),
+                              read_only=mount.get('read_only', False))
                 for mount
                 in task.mounts
             ]
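
The old `airflow.kubernetes.volume` wrappers are gone in Airflow 2; the `KubernetesPodOperator` from the cncf provider consumes native `kubernetes` client models directly, including a list of `V1EnvVar` objects instead of a plain env dict. A minimal sketch of the equivalent objects (names and paths are hypothetical):

```python
from kubernetes.client import models as k8s

volume = k8s.V1Volume(
    name='task-data',
    persistent_volume_claim=k8s.V1PersistentVolumeClaimVolumeSource(
        claim_name='task-data-pvc'),
)
mount = k8s.V1VolumeMount(name='task-data', mount_path='/mnt/data',
                          sub_path=None, read_only=False)
# env_vars is now a list of typed objects rather than a {name: value} dict:
env_vars = [k8s.V1EnvVar(name=k, value=v) for k, v in {'ENV': 'dev'}.items()]
```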
diff --git a/liminal/runners/airflow/operators/cloudformation.py b/liminal/runners/airflow/operators/cloudformation.py
index c523579..dfbc262 100644
--- a/liminal/runners/airflow/operators/cloudformation.py
+++ b/liminal/runners/airflow/operators/cloudformation.py
@@ -22,15 +22,14 @@ Can be removed when Airflow 2.0.0 is released.
 """
 from typing import List
 
-from airflow.contrib.hooks.aws_hook import AwsHook
+from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.models import BaseOperator
-from airflow.sensors.base_sensor_operator import BaseSensorOperator
-from airflow.utils.decorators import apply_defaults
+from airflow.sensors.base import BaseSensorOperator
 from botocore.exceptions import ClientError
 
 
 # noinspection PyAbstractClass
-class CloudFormationHook(AwsHook):
+class CloudFormationHook(AwsBaseHook):
     """
     Interact with AWS CloudFormation.
     """
@@ -38,7 +37,7 @@ class CloudFormationHook(AwsHook):
     def __init__(self, region_name=None, *args, **kwargs):
         self.region_name = region_name
         self.conn = None
-        super().__init__(*args, **kwargs)
+        super().__init__(*args, client_type='cloudformation', **kwargs)
 
     def get_conn(self):
         self.conn = self.get_client_type('cloudformation', self.region_name)
@@ -59,7 +58,6 @@ class BaseCloudFormationOperator(BaseOperator):
     ui_color = '#1d472b'
     ui_fgcolor = '#FFF'
 
-    @apply_defaults
     def __init__(
             self,
             params,
@@ -95,7 +93,6 @@ class CloudFormationCreateStackOperator(BaseCloudFormationOperator):
     template_ext = ()
     ui_color = '#6b9659'
 
-    @apply_defaults
     def __init__(
             self,
             params,
@@ -122,7 +119,6 @@ class CloudFormationDeleteStackOperator(BaseCloudFormationOperator):
     ui_color = '#1d472b'
     ui_fgcolor = '#FFF'
 
-    @apply_defaults
     def __init__(
             self,
             params,
@@ -147,7 +143,6 @@ class BaseCloudFormationSensor(BaseSensorOperator):
     :type poke_interval: int
     """
 
-    @apply_defaults
     def __init__(self,
                  stack_name,
                  complete_status,
@@ -221,7 +216,6 @@ class CloudFormationCreateStackSensor(BaseCloudFormationSensor):
     template_fields = ['stack_name']
     ui_color = '#C5CAE9'
 
-    @apply_defaults
     def __init__(self,
                  stack_name,
                  aws_conn_id='aws_default',
@@ -253,7 +247,6 @@ class CloudFormationDeleteStackSensor(BaseCloudFormationSensor):
     template_fields = ['stack_name']
     ui_color = '#C5CAE9'
 
-    @apply_defaults
     def __init__(self,
                  stack_name,
                  aws_conn_id='aws_default',
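
Two Airflow 2 changes drive this file: `AwsBaseHook` must be told which boto3 `client_type` to construct, and `apply_defaults` disappeared because `BaseOperator` now applies default args through its metaclass, so the decorators are simply dropped. A minimal sketch of the new hook usage (connection id is the stock default; the stack name is hypothetical):

```python
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook

# client_type tells the base hook which boto3 client to build.
hook = AwsBaseHook(aws_conn_id='aws_default', client_type='cloudformation')
cloudformation = hook.get_conn()
status = cloudformation.describe_stacks(
    StackName='my-stack')['Stacks'][0]['StackStatus']  # 'my-stack' is hypothetical
```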
diff --git a/liminal/runners/airflow/tasks/create_cloudformation_stack.py b/liminal/runners/airflow/tasks/create_cloudformation_stack.py
index 9a7aef9..c94d640 100644
--- a/liminal/runners/airflow/tasks/create_cloudformation_stack.py
+++ b/liminal/runners/airflow/tasks/create_cloudformation_stack.py
@@ -16,8 +16,8 @@
 # specific language governing permissions and limitations
 # under the License.
 
-from airflow.operators.dummy_operator import DummyOperator
-from airflow.operators.python_operator import BranchPythonOperator
+from airflow.operators.dummy import DummyOperator
+from airflow.operators.python import BranchPythonOperator
 from flatdict import FlatDict
 
 from liminal.runners.airflow.operators.cloudformation import CloudFormationCreateStackOperator, \
@@ -38,10 +38,9 @@ class CreateCloudFormationStackTask(airflow.AirflowTask):
 
     def apply_task_to_dag(self):
         check_cloudformation_stack_exists_task = BranchPythonOperator(
-            templates_dict={'stack_name': self.stack_name},
+            op_kwargs={'stack_name': self.stack_name},
             task_id=f'is-cloudformation-{self.task_id}-running',
             python_callable=self.__cloudformation_stack_running_branch,
-            provide_context=True,
             dag=self.dag
         )
 
@@ -75,9 +74,8 @@ class CreateCloudFormationStackTask(airflow.AirflowTask):
 
         return stack_creation_end_task
 
-    def __cloudformation_stack_running_branch(self, **kwargs):
+    def __cloudformation_stack_running_branch(self, stack_name):
         cloudformation = CloudFormationHook().get_conn()
-        stack_name = kwargs['templates_dict']['stack_name']
         try:
             stack_status = cloudformation.describe_stacks(StackName=stack_name)['Stacks'][0][
                 'StackStatus']
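
Alongside the import moves (`airflow.operators.dummy`, `airflow.operators.python`), Airflow 2 drops `provide_context` (the callable can always receive context via `**kwargs`), and plain arguments are passed through `op_kwargs`, which is why the branch callable now takes `stack_name` directly. A minimal sketch of the pattern with hypothetical task ids:

```python
from datetime import datetime

from airflow import DAG
from airflow.operators.python import BranchPythonOperator

def stack_branch(stack_name):
    # op_kwargs arrive as ordinary parameters; no templates_dict indirection.
    return 'create-stack' if stack_name else 'skip-create'

with DAG('branch_example', start_date=datetime(2021, 1, 1), schedule_interval=None) as dag:
    check = BranchPythonOperator(
        task_id='check-stack',
        python_callable=stack_branch,
        op_kwargs={'stack_name': 'my-stack'},  # hypothetical stack name
    )
```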
diff --git a/requirements.txt b/requirements.txt
index c074989..1b3b0bf 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -17,13 +17,12 @@
 # under the License.
 
 docker==4.2.0
-apache-airflow==1.10.12
+apache-airflow==2.1.2
 click==7.1.1
 Flask==1.1.1
 pyyaml==5.3.1
-boto3==1.12.10
-botocore==1.15.21
-kubernetes==12.0.1
+boto3==1.17.112
+botocore==1.20.112
 wheel==0.36.2
 termcolor~=1.1.0
 docker-pycreds==0.4.0
@@ -39,4 +38,9 @@ statsd>=3.3.0, <4.0
 sqlalchemy~=1.3.15
 flatdict==3.4.0
 jinja2>=2.10.1, <2.11.0
-python-json-logger==2.0.1
\ No newline at end of file
+python-json-logger==2.0.1
+requests==2.25.0
+apache-airflow-providers-amazon==1.4.0
+apache-airflow-providers-cncf-kubernetes==1.0.2
+#apache-airflow-providers-google==4.0.0
+#apache-airflow[google,amazon,apache.spark]==2.0.0
\ No newline at end of file
diff --git a/scripts/Dockerfile-airflow b/scripts/Dockerfile-airflow
index 284b1b9..4ee5a2e 100644
--- a/scripts/Dockerfile-airflow
+++ b/scripts/Dockerfile-airflow
@@ -16,7 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 
-FROM apache/airflow:1.10.12-python3.6
+FROM apache/airflow:2.1.2-python3.8
 
 ARG LIMINAL_VERSION='apache-liminal'
 ARG DAG_FOLDER='/opt/airflow/dags/'
@@ -25,6 +25,7 @@ ADD *.whl ${DAG_FOLDER}
 
 RUN ls -ls ${DAG_FOLDER}
 ADD liminal/runners/airflow/dag/liminal_dags.py ${DAG_FOLDER}
+ADD scripts/webserver_config.py /opt/airflow/
 
 USER airflow
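
The new `ADD` line matters because Airflow 2's RBAC webserver reads `webserver_config.py` from `$AIRFLOW_HOME` (here `/opt/airflow`) at startup; without it the UI falls back to a freshly generated default config and its login requirements.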
 
diff --git a/scripts/docker-compose.yml b/scripts/docker-compose.yml
index 2c284fc..2098362 100644
--- a/scripts/docker-compose.yml
+++ b/scripts/docker-compose.yml
@@ -15,98 +15,85 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+---
+version: '3'
+x-airflow-common:
+  &airflow-common
+  image: liminal-airflow
+  build:
+    context: .
+    dockerfile: scripts/Dockerfile-airflow
+    args:
+        LIMINAL_VERSION: ${LIMINAL_VERSION}
+  environment:
+    &airflow-common-env
+    LOAD_EX: 'n'
+    AIRFLOW__CORE__EXECUTOR: 'LocalExecutor'
+    KUBECONFIG: /home/airflow/kube/config
+    AIRFLOW__CORE__LOAD_EXAMPLES: 'False'
+    AIRFLOW__WEBSERVER__WORKERS: '1'
+    AIRFLOW__WEBSERVER__EXPOSE_CONFIG: 'True'
+    _AIRFLOW_DB_UPGRADE: 'true'
+    LIMINAL_HOME: /opt/airflow/dags
+    AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS: 'False'
+    AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres:5432/airflow
+    AIRFLOW_CONN_METADATA_DB: postgresql+psycopg2://airflow:airflow@postgres:5432/airflow
+    AIRFLOW_VAR__METADATA_DB_SCHEMA: airflow
+  volumes:
+    - ${LIMINAL_HOME}:/opt/airflow/dags
+    - ${LIMINAL_HOME}/logs:/opt/airflow/logs
+    - ${HOME}/.kube:/home/airflow/kube
+  depends_on:
+    postgres:
+      condition: service_healthy
+  healthcheck:
+      test: ["CMD-SHELL", "[ -f /usr/local/airflow/airflow-webserver.pid ]"]
+      interval: 30s
+      timeout: 30s
+      retries: 3
+  logging:
+      options:
+          max-size: 10m
+          max-file: "3"
+  restart: always
 
-    version: '3.7'
-    services:
-        postgres:
-            image: postgres:9.6
-            container_name: liminal-postgress
-            environment:
-                - POSTGRES_USER=airflow
-                - POSTGRES_PASSWORD=airflow
-                - POSTGRES_DB=
-            ports:
-                - "5432:5432"
-            volumes:
-                - ${LIMINAL_HOME}/db:/var/lib/postgresql/data
-            logging:
-                options:
-                    max-size: 10m
-                    max-file: "3"
+services:
+    postgres:
+        image: postgres:13
+        container_name: liminal-postgress
+        environment:
+          POSTGRES_USER: airflow
+          POSTGRES_PASSWORD: airflow
+          POSTGRES_DB: ""
+        ports:
+            - "5432:5432"
+        volumes:
+            - ${LIMINAL_HOME}/db:/var/lib/postgresql/data
+        logging:
+            options:
+                max-size: 10m
+                max-file: "3"
+        healthcheck:
+            test: ["CMD", "pg_isready", "-U", "airflow"]
+            interval: 30s
+            timeout: 30s
+            retries: 3
+        restart: always
 
-        webserver:
-            build:
-                context: .
-                dockerfile: scripts/Dockerfile-airflow
-                args:
-                    LIMINAL_VERSION: ${LIMINAL_VERSION}
-            image: liminal-airflow
-            container_name: liminal-webserver
-            depends_on:
-                - postgres
-            environment:
-                - LOAD_EX=n
-                - EXECUTOR=Local
-                - KUBECONFIG=/home/airflow/kube/config
-                - AIRFLOW__WEBSERVER__WORKERS=1
-                - LIMINAL_HOME=/opt/airflow/dags
-                - AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=False
-                - AIRFLOW__CORE__SQL_ALCHEMY_CONN=postgres+psycopg2://airflow:airflow@postgres:5432/airflow
-                - AIRFLOW_CONN_METADATA_DB=postgres+psycopg2://airflow:airflow@postgres:5432/airflow
-                - AIRFLOW_VAR__METADATA_DB_SCHEMA=airflow
-            logging:
-                options:
-                    max-size: 10m
-                    max-file: "3"
-            volumes:
-                - ${LIMINAL_HOME}:/opt/airflow/dags
-                - ${LIMINAL_HOME}/logs:/opt/airflow/logs
-                - ${HOME}/.kube:/home/airflow/kube
-            ports:
-                - "8080:8080"
-            command: "webserver"
-            healthcheck:
-                test: ["CMD-SHELL", "[ -f /usr/local/airflow/airflow-webserver.pid ]"]
-                interval: 30s
-                timeout: 30s
-                retries: 3
+    webserver:
+        <<: *airflow-common
+        container_name: liminal-webserver
+        environment:
+          <<: *airflow-common-env
+        ports:
+            - "8080:8080"
+        command: "webserver"
 
-
-        scheduler:
-            build:
-                context: .
-                dockerfile: scripts/Dockerfile-airflow
-                args:
-                    LIMINAL_VERSION: ${LIMINAL_VERSION}
-            image: liminal-airflow
-            container_name: liminal-scheduler
-            depends_on:
-                - postgres
-                - webserver
-            environment:
-                - LOAD_EX=n
-                - EXECUTOR=Local
-                - AIRFLOW__WEBSERVER__WORKERS=1
-                - KUBECONFIG=/home/airflow/kube/config
-                - LIMINAL_HOME=/opt/airflow/dags
-                - AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=False
-                - AIRFLOW__CORE__SQL_ALCHEMY_CONN=postgres+psycopg2://airflow:airflow@postgres:5432/airflow
-                - AIRFLOW_CONN_METADATA_DB=postgres+psycopg2://airflow:airflow@postgres:5432/airflow
-                - AIRFLOW_VAR__METADATA_DB_SCHEMA=airflow
-                - AIRFLOW__SCHEDULER__SCHEDULER_HEARTBEAT_SEC=10
-            logging:
-                options:
-                    max-size: 10m
-                    max-file: "3"
-            volumes:
-                - ${LIMINAL_HOME}:/opt/airflow/dags
-                - ${LIMINAL_HOME}/logs:/opt/airflow/logs
-                - ${HOME}/.kube:/home/airflow/kube
-            ports:
-                - "8793:8793"
-            command: "scheduler"
-            healthcheck:
-                test: ["CMD-SHELL", "[ -f /usr/local/airflow/airflow-webserver.pid ]"]
-                interval: 30s
-                timeout: 30s
-                retries: 3
\ No newline at end of file
+    scheduler:
+        <<: *airflow-common
+        container_name: liminal-scheduler
+        environment:
+          <<: *airflow-common-env
+        ports:
+            - "8793:8793"
+        command: "scheduler"
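
The rewrite deduplicates the webserver and scheduler definitions by hoisting the shared block into an `x-airflow-common` extension field and splicing it in with YAML anchors and merge keys (`<<: *airflow-common`). Merge keys resolve at load time, which is easy to check with PyYAML; a minimal sketch (run from the repository root; the path is an assumption):

```python
import yaml  # PyYAML resolves '<<' merge keys during safe_load

with open('scripts/docker-compose.yml') as f:
    compose = yaml.safe_load(f)

# Shared keys come from the x-airflow-common anchor; overrides stay per-service.
print(compose['services']['webserver']['image'])    # liminal-airflow (inherited)
print(compose['services']['scheduler']['command'])  # scheduler (service-specific)
```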
diff --git a/scripts/liminal b/scripts/liminal
index 397c02d..d560ea0 100755
--- a/scripts/liminal
+++ b/scripts/liminal
@@ -95,9 +95,16 @@ def deploy_liminal_core_internal(liminal_home, clean):
                         os.path.join(project_dir, local_file_name))
     if clean:
         docker_compose_command('down', ['--remove-orphans', '--rmi', 'local'])
-        docker_compose_command('build', ['--no-cache'])
-        docker_compose_command('run', ['webserver', 'resetdb', '-y'])
-        docker_compose_command('run', ['webserver', 'initdb'])
+        docker_compose_command('build', [])
+        docker_compose_command('run', ['webserver', 'db reset', '-y'])
+        docker_compose_command('run', ['webserver', 'db init'])
+        docker_compose_command('run', ['webserver', 'users create '
+                                                    '--role Admin '
+                                                    '--username admin '
+                                                    '--email admin '
+                                                    '--firstname admin '
+                                                    '--lastname admin '
+                                                    '--password admin'])
         docker_compose_command('down', ['--remove-orphans'])
 
 
@@ -137,7 +144,7 @@ def deploy_liminal_apps(path, clean):
     config_files = files_util.find_config_files(path)
     for config_file in config_files:
         click.echo(f"deploying liminal file: {config_file}")
-        relative_path = os.path.relpath(config_file)
+        relative_path = os.path.relpath(config_file, os.path.dirname(path))
         target_yml_name = os.path.join(environment.get_dags_dir(), relative_path)
         pathlib.Path(target_yml_name).parent.mkdir(parents=True, exist_ok=True)
         shutil.copyfile(config_file, target_yml_name)
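
Two things change here: the admin CLI moved to subcommands in Airflow 2 (`resetdb` becomes `db reset`, `initdb` becomes `db init`, plus an explicit `users create`, since the RBAC UI ships with no default user), and the deployed YAML path is now computed relative to the parent of the supplied project path instead of the current working directory. A small illustration of the `relpath` difference with hypothetical paths:

```python
import os

path = '/home/user/myproject'
config_file = '/home/user/myproject/pipelines/liminal.yml'

# Old: os.path.relpath(config_file) depended on wherever the CLI was run from.
# New: anchored to the project's parent, preserving the project-relative layout.
print(os.path.relpath(config_file, os.path.dirname(path)))
# -> myproject/pipelines/liminal.yml
```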
diff --git a/scripts/requirements-airflow.txt b/scripts/requirements-airflow.txt
index 7f5c9e8..a9bf995 100644
--- a/scripts/requirements-airflow.txt
+++ b/scripts/requirements-airflow.txt
@@ -21,4 +21,7 @@ pyyaml
 boto3==1.12.10
 botocore==1.15.21
 kubernetes
-flatdict==3.4.0
\ No newline at end of file
+diskcache==3.1.1
+flatdict==4.0.1
+kafka-python==2.0.2
+influxdb-client
\ No newline at end of file
diff --git a/scripts/webserver_config.py b/scripts/webserver_config.py
new file mode 100644
index 0000000..d7e953d
--- /dev/null
+++ b/scripts/webserver_config.py
@@ -0,0 +1,128 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Default configuration for the Airflow webserver"""
+import os
+
+from flask_appbuilder.security.manager import AUTH_DB
+
+# from flask_appbuilder.security.manager import AUTH_LDAP
+# from flask_appbuilder.security.manager import AUTH_OAUTH
+# from flask_appbuilder.security.manager import AUTH_OID
+# from flask_appbuilder.security.manager import AUTH_REMOTE_USER
+
+
+basedir = os.path.abspath(os.path.dirname(__file__))
+
+# Flask-WTF flag for CSRF
+WTF_CSRF_ENABLED = True
+
+# ----------------------------------------------------
+# AUTHENTICATION CONFIG
+# ----------------------------------------------------
+# For details on how to set up each of the following authentication, see
+# http://flask-appbuilder.readthedocs.io/en/latest/security.html# authentication-methods
+# for details.
+
+# The authentication type
+# AUTH_OID : Is for OpenID
+# AUTH_DB : Is for database
+# AUTH_LDAP : Is for LDAP
+# AUTH_REMOTE_USER : Is for using REMOTE_USER from web server
+# AUTH_OAUTH : Is for OAuth
+AUTH_TYPE = AUTH_DB
+
+# Uncomment to setup Full admin role name
+# AUTH_ROLE_ADMIN = 'Admin'
+
+# Uncomment to setup Public role name, no authentication needed
+AUTH_ROLE_PUBLIC = 'Admin'
+
+# Will allow user self registration
+# AUTH_USER_REGISTRATION = True
+
+# The recaptcha it's automatically enabled for user self registration is active and the keys are necessary
+# RECAPTCHA_PRIVATE_KEY = PRIVATE_KEY
+# RECAPTCHA_PUBLIC_KEY = PUBLIC_KEY
+
+# Config for Flask-Mail necessary for user self registration
+# MAIL_SERVER = 'smtp.gmail.com'
+# MAIL_USE_TLS = True
+# MAIL_USERNAME = 'yourappemail@gmail.com'
+# MAIL_PASSWORD = 'passwordformail'
+# MAIL_DEFAULT_SENDER = 'sender@gmail.com'
+
+# The default user self registration role
+# AUTH_USER_REGISTRATION_ROLE = "Public"
+
+# When using OAuth Auth, uncomment to setup provider(s) info
+# Google OAuth example:
+# OAUTH_PROVIDERS = [{
+#   'name':'google',
+#     'token_key':'access_token',
+#     'icon':'fa-google',
+#         'remote_app': {
+#             'api_base_url':'https://www.googleapis.com/oauth2/v2/',
+#             'client_kwargs':{
+#                 'scope': 'email profile'
+#             },
+#             'access_token_url':'https://accounts.google.com/o/oauth2/token',
+#             'authorize_url':'https://accounts.google.com/o/oauth2/auth',
+#             'request_token_url': None,
+#             'client_id': GOOGLE_KEY,
+#             'client_secret': GOOGLE_SECRET_KEY,
+#         }
+# }]
+
+# When using LDAP Auth, setup the ldap server
+# AUTH_LDAP_SERVER = "ldap://ldapserver.new"
+
+# When using OpenID Auth, uncomment to setup OpenID providers.
+# example for OpenID authentication
+# OPENID_PROVIDERS = [
+#    { 'name': 'Yahoo', 'url': 'https://me.yahoo.com' },
+#    { 'name': 'AOL', 'url': 'http://openid.aol.com/<username>' },
+#    { 'name': 'Flickr', 'url': 'http://www.flickr.com/<username>' },
+#    { 'name': 'MyOpenID', 'url': 'https://www.myopenid.com' }]
+
+# ----------------------------------------------------
+# Theme CONFIG
+# ----------------------------------------------------
+# Flask App Builder comes up with a number of predefined themes
+# that you can use for Apache Airflow.
+# http://flask-appbuilder.readthedocs.io/en/latest/customizing.html#changing-themes
+# Please make sure to remove "navbar_color" configuration from airflow.cfg
+# in order to fully utilize the theme. (or use that property in conjunction with theme)
+# APP_THEME = "bootstrap-theme.css"  # default bootstrap
+# APP_THEME = "amelia.css"
+# APP_THEME = "cerulean.css"
+# APP_THEME = "cosmo.css"
+# APP_THEME = "cyborg.css"
+# APP_THEME = "darkly.css"
+# APP_THEME = "flatly.css"
+# APP_THEME = "journal.css"
+# APP_THEME = "lumen.css"
+# APP_THEME = "paper.css"
+# APP_THEME = "readable.css"
+# APP_THEME = "sandstone.css"
+# APP_THEME = "simplex.css"
+# APP_THEME = "slate.css"
+# APP_THEME = "solar.css"
+# APP_THEME = "spacelab.css"
+# APP_THEME = "superhero.css"
+# APP_THEME = "united.css"
+# APP_THEME = "yeti.css"
\ No newline at end of file
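
Worth noting for anyone reusing this file: `AUTH_TYPE = AUTH_DB` combined with `AUTH_ROLE_PUBLIC = 'Admin'` hands every unauthenticated visitor the Admin role, which keeps the local getting-started flow frictionless but is only sensible for a development sandbox.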
diff --git a/tests/runners/airflow/build/http/python/test_python_server_image_builder.py b/tests/runners/airflow/build/http/python/test_python_server_image_builder.py
index f8173e9..ee40095 100644
--- a/tests/runners/airflow/build/http/python/test_python_server_image_builder.py
+++ b/tests/runners/airflow/build/http/python/test_python_server_image_builder.py
@@ -80,7 +80,7 @@ class TestPythonServer(TestCase):
         thread = threading.Thread(target=self.__run_container)
         thread.start()
 
-        time.sleep(5)
+        time.sleep(20)
 
         logging.info('Sending request to server')
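
The fixed warm-up is raised because the Airflow 2 test image takes noticeably longer than 5 seconds to come up. A retry loop that polls the server would avoid the hard-coded delay altogether; a sketch of that alternative (not what the commit does; the endpoint is hypothetical):

```python
import time
import urllib.request

def wait_until_up(url, timeout=60, interval=1):
    """Poll `url` until it responds or `timeout` seconds elapse."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            urllib.request.urlopen(url, timeout=interval)
            return True
        except OSError:  # URLError subclasses OSError
            time.sleep(interval)
    return False

# wait_until_up('http://localhost/healthcheck')  # hypothetical endpoint
```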
 
diff --git a/tests/runners/airflow/executors/test_emr.py b/tests/runners/airflow/executors/test_emr.py
index e01fed9..b4d107b 100644
--- a/tests/runners/airflow/executors/test_emr.py
+++ b/tests/runners/airflow/executors/test_emr.py
@@ -109,6 +109,8 @@ class TestEMRExecutorTask(TestCase):
 
         self.assertIsInstance(emr_add_step_task, EmrAddStepsOperator)
 
+        emr_add_step_task.render_template_fields({})
+
         step_id = emr_add_step_task.execute(self.dag.context)[0]
 
         desc_step = self.client.describe_step(
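
The added `render_template_fields` call is needed because the test invokes `execute` directly, bypassing the TaskInstance machinery that normally renders Jinja-templated fields (such as the operator's `steps`) before execution in Airflow 2. The pattern in isolation, using a stock operator as a stand-in (a sketch, not the project's test code):

```python
from airflow.operators.bash import BashOperator

op = BashOperator(task_id='render_demo',
                  bash_command='echo {{ params.msg }}')
# Render templated fields manually before calling execute() outside a TaskInstance.
op.render_template_fields(context={'params': {'msg': 'hello'}})
print(op.bash_command)  # -> echo hello
```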
diff --git a/tests/runners/airflow/tasks/test_create_cloudformation_stack.py b/tests/runners/airflow/tasks/test_create_cloudformation_stack.py
index 58315e1..a2317d1 100644
--- a/tests/runners/airflow/tasks/test_create_cloudformation_stack.py
+++ b/tests/runners/airflow/tasks/test_create_cloudformation_stack.py
@@ -19,8 +19,8 @@ from datetime import datetime
 from unittest import TestCase, mock
 from unittest.mock import MagicMock
 
-from airflow.operators.dummy_operator import DummyOperator
-from airflow.operators.python_operator import BranchPythonOperator
+from airflow.operators.dummy import DummyOperator
+from airflow.operators.python import BranchPythonOperator
 from moto import mock_cloudformation
 
 from liminal.runners.airflow.executors import airflow
@@ -101,8 +101,8 @@ class TestCreateCloudFormationStackTask(TestCase):
             is_cloudformation_exists = self.dag.tasks[0]
 
             print(is_cloudformation_exists)
-            self.assertEqual(is_cloudformation_exists.python_callable(
-                templates_dict={'stack_name': self.cluster_name}),
+            self.assertEqual(
+                is_cloudformation_exists.python_callable(stack_name=self.cluster_name),
                 'create-cloudformation-create_emr')
 
     def test_cloudformation_exist_and_running(self):
@@ -122,8 +122,7 @@ class TestCreateCloudFormationStackTask(TestCase):
                 mock_conn.return_value = mock_cloudformation_conn
 
                 self.assertEqual(
-                    is_cloudformation_exists.python_callable(
-                        templates_dict={'stack_name': self.cluster_name}),
+                    is_cloudformation_exists.python_callable(stack_name=self.cluster_name),
                     'creation-end-create_emr')
 
     def test_cloudformation_exists_and_not_running(self):
@@ -143,8 +142,7 @@ class TestCreateCloudFormationStackTask(TestCase):
                 mock_conn.return_value = mock_cloudformation_conn
 
                 self.assertEqual(
-                    is_cloudformation_exists.python_callable(
-                        templates_dict={'stack_name': self.cluster_name}),
+                    is_cloudformation_exists.python_callable(stack_name=self.cluster_name),
                     'create-cloudformation-create_emr')
 
     def test_cloudformation_create_stack_operator_task(self):