You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airflow.apache.org by ka...@apache.org on 2020/06/02 16:46:19 UTC
[airflow] branch master updated: Add fudament for API based on
connexion (#8149)
This is an automated email from the ASF dual-hosted git repository.
kamilbregula pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/master by this push:
new 67379d1 Add fudament for API based on connexion (#8149)
67379d1 is described below
commit 67379d1d9e0f19fa10020e37d12e407fb07f21bc
Author: Kamil BreguĊa <mi...@users.noreply.github.com>
AuthorDate: Tue Jun 2 18:45:37 2020 +0200
Add fudament for API based on connexion (#8149)
---
.pre-commit-config.yaml | 7 +-
MANIFEST.in | 1 +
MANIFEST.in => airflow/api_connexion/__init__.py | 20 ----
.../api_connexion/endpoints/__init__.py | 20 ----
.../api_connexion/endpoints/config_endpoint.py | 28 ++----
.../api_connexion/endpoints/connection_endpoint.py | 56 +++++++----
.../api_connexion/endpoints/dag_endpoint.py | 51 ++++++----
.../api_connexion/endpoints/dag_run_endpoint.py | 61 ++++++++++++
.../api_connexion/endpoints/dag_source_endpoint.py | 28 ++----
.../api_connexion/endpoints/event_log_endpoint.py | 35 ++++---
.../api_connexion/endpoints/extra_link_endpoint.py | 28 ++----
.../api_connexion/endpoints/health_endpoint.py | 28 ++----
.../endpoints/import_errror_endpoint.py | 42 +++++----
.../api_connexion/endpoints/log_endpoint.py | 28 ++----
.../api_connexion/endpoints/pool_endpoint.py | 56 +++++++----
.../api_connexion/endpoints/task_endpoint.py | 35 ++++---
.../endpoints/task_instance_endpoint.py | 49 ++++++----
.../api_connexion/endpoints/variable_endpoint.py | 56 +++++++----
.../api_connexion/endpoints/xcom_endpoint.py | 56 +++++++----
.../api_connexion/openapi/v1.yaml | 105 ++++++++++++---------
airflow/config_templates/airflow_local_settings.py | 2 +-
airflow/www/app.py | 16 ++++
requirements/requirements-python3.6.txt | 34 ++++---
requirements/requirements-python3.7.txt | 28 +++---
requirements/setup-3.6.md5 | 2 +-
requirements/setup-3.7.md5 | 2 +-
setup.py | 4 +-
MANIFEST.in => tests/api_connexion/__init__.py | 20 ----
.../api_connexion/endpoints/__init__.py | 20 ----
.../endpoints/test_config_endpoint.py | 38 ++++----
.../endpoints/test_connection_endpoint.py | 66 +++++++++++++
tests/api_connexion/endpoints/test_dag_endpoint.py | 59 ++++++++++++
.../endpoints/test_dag_run_endpoint.py | 66 +++++++++++++
.../endpoints/test_dag_source_endpoint.py | 46 +++++++++
.../endpoints/test_event_log_endpoint.py | 45 +++++++++
.../endpoints/test_extra_link_endpoint.py | 40 ++++----
.../endpoints/test_health_endpoint.py | 36 ++++---
.../endpoints/test_import_errror_endpoint.py | 52 ++++++++++
.../api_connexion/endpoints/test_log_endpoint.py | 40 ++++----
.../api_connexion/endpoints/test_pool_endpoint.py | 66 +++++++++++++
.../api_connexion/endpoints/test_task_endpoint.py | 45 +++++++++
.../endpoints/test_task_instance_endpoint.py | 61 ++++++++++++
.../endpoints/test_variable_endpoint.py | 66 +++++++++++++
.../api_connexion/endpoints/test_xcom_endpoint.py | 76 +++++++++++++++
tests/www/test_app.py | 4 +
45 files changed, 1222 insertions(+), 502 deletions(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c9f9210..e84c0f6 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -179,14 +179,13 @@ metastore_browser/templates/.*\\.html$|.*\\.jinja2"
name: Lint OpenAPI using speccy
language: docker_image
entry: wework/speccy lint
- files: ^openapi.yaml$
+ files: ^airflow/api_connexion/openapi/
- id: lint-openapi
name: Lint OpenAPI using openapi-spec-validator
- entry: openapi-spec-validator --schema 3.0.0 openapi.yaml
+ entry: openapi-spec-validator --schema 3.0.0
language: python
additional_dependencies: ['openapi-spec-validator']
- pass_filenames: false
- files: ^openapi.yaml$
+ files: ^airflow/api_connexion/openapi/
- id: lint-dockerfile
name: Lint dockerfile
language: system
diff --git a/MANIFEST.in b/MANIFEST.in
index c29a6a8..67e50a3 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -34,3 +34,4 @@ graft airflow/config_templates
recursive-exclude airflow/www/node_modules *
global-exclude __pycache__ *.pyc
include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+include airflow/api_connexion/openapi/v1.yaml
diff --git a/MANIFEST.in b/airflow/api_connexion/__init__.py
similarity index 59%
copy from MANIFEST.in
copy to airflow/api_connexion/__init__.py
index c29a6a8..13a8339 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/__init__.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -15,22 +14,3 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/__init__.py
similarity index 59%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/__init__.py
index c29a6a8..13a8339 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/__init__.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -15,22 +14,3 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/config_endpoint.py
similarity index 59%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/config_endpoint.py
index c29a6a8..1218d21 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/config_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,21 +15,12 @@
# specific language governing permissions and limitations
# under the License.
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at: https://github.com/apache/airflow/issues/8136
+
+
+def get_config():
+ """
+ Get current configuration.
+ """
+ raise NotImplementedError("Not implemented yet.")
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/connection_endpoint.py
similarity index 51%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/connection_endpoint.py
index c29a6a8..da85f79 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/connection_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,21 +15,40 @@
# specific language governing permissions and limitations
# under the License.
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at: https://github.com/apache/airflow/issues/8127
+
+
+def delete_connection():
+ """
+ Delete a connection entry
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_connection():
+ """
+ Get a connection entry
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_connections():
+ """
+ Get all connection entries
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def patch_connection():
+ """
+ Update a connection entry
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def post_connection():
+ """
+ Create connection entry
+ """
+ raise NotImplementedError("Not implemented yet.")
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/dag_endpoint.py
similarity index 54%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/dag_endpoint.py
index c29a6a8..7cbb0ef 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/dag_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,21 +15,35 @@
# specific language governing permissions and limitations
# under the License.
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at:
+# * https://github.com/apache/airflow/issues/8128
+# * https://github.com/apache/airflow/issues/8138
+
+
+def get_dag():
+ """
+ Get basic information about a DAG.
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_dag_details():
+ """
+ Get details of DAG.
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_dags():
+ """
+ Get all DAGs.
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def patch_dag():
+ """
+ Update the specific DAG
+ """
+ raise NotImplementedError("Not implemented yet.")
diff --git a/airflow/api_connexion/endpoints/dag_run_endpoint.py b/airflow/api_connexion/endpoints/dag_run_endpoint.py
new file mode 100644
index 0000000..810009a
--- /dev/null
+++ b/airflow/api_connexion/endpoints/dag_run_endpoint.py
@@ -0,0 +1,61 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at: https://github.com/apache/airflow/issues/8129
+
+
+def delete_dag_run():
+ """
+ Delete a DAG Run
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_dag_run():
+ """
+ Get a DAG Run.
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_dag_runs():
+ """
+ Get all DAG Runs.
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_dag_runs_batch():
+ """
+ Get list of DAG Runs
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def patch_dag_run():
+ """
+ Update a DAG Run
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def post_dag_run():
+ """
+ Trigger a DAG.
+ """
+ raise NotImplementedError("Not implemented yet.")
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/dag_source_endpoint.py
similarity index 59%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/dag_source_endpoint.py
index c29a6a8..71982b8 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/dag_source_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,21 +15,12 @@
# specific language governing permissions and limitations
# under the License.
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at: https://github.com/apache/airflow/issues/8137
+
+
+def get_dag_source():
+ """
+ Get source code using file token
+ """
+ raise NotImplementedError("Not implemented yet.")
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/event_log_endpoint.py
similarity index 59%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/event_log_endpoint.py
index c29a6a8..9a10506 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/event_log_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,21 +15,19 @@
# specific language governing permissions and limitations
# under the License.
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at: https://github.com/apache/airflow/issues/8135
+
+
+def get_event_log():
+ """
+ Get a log entry
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_event_logs():
+ """
+ Get all log entries from event log
+ """
+ raise NotImplementedError("Not implemented yet.")
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/extra_link_endpoint.py
similarity index 59%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/extra_link_endpoint.py
index c29a6a8..2834051 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/extra_link_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,21 +15,12 @@
# specific language governing permissions and limitations
# under the License.
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at: https://github.com/apache/airflow/issues/8140
+
+
+def get_extra_links():
+ """
+ Get extra links for task instance
+ """
+ raise NotImplementedError("Not implemented yet.")
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/health_endpoint.py
similarity index 59%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/health_endpoint.py
index c29a6a8..fa1099b 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/health_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,21 +15,12 @@
# specific language governing permissions and limitations
# under the License.
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at: https://github.com/apache/airflow/issues/8144
+
+
+def get_health():
+ """
+ Checks if the API works
+ """
+ return "OK"
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/import_errror_endpoint.py
similarity index 59%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/import_errror_endpoint.py
index c29a6a8..0f180d9 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/import_errror_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,21 +15,26 @@
# specific language governing permissions and limitations
# under the License.
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at: https://github.com/apache/airflow/issues/8130
+
+
+def delete_import_error():
+ """
+ Delete an import error
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_import_error():
+ """
+ Get an import errors
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_import_errors():
+ """
+ Get all import errors
+ """
+ raise NotImplementedError("Not implemented yet.")
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/log_endpoint.py
similarity index 59%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/log_endpoint.py
index c29a6a8..df66a95 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/log_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,21 +15,12 @@
# specific language governing permissions and limitations
# under the License.
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at: https://github.com/apache/airflow/issues/8135
+
+
+def get_log():
+ """
+ Get logs for specific task instance
+ """
+ raise NotImplementedError("Not implemented yet.")
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/pool_endpoint.py
similarity index 54%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/pool_endpoint.py
index c29a6a8..af4c4c2 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/pool_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,21 +15,40 @@
# specific language governing permissions and limitations
# under the License.
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at: https://github.com/apache/airflow/issues/8131
+
+
+def delete_pool():
+ """
+ Delete a pool
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_pool():
+ """
+ Get a pool
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_pools():
+ """
+ Get all pools
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def patch_pool():
+ """
+ Update a pool
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def post_pool():
+ """
+ Create aa pool
+ """
+ raise NotImplementedError("Not implemented yet.")
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/task_endpoint.py
similarity index 59%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/task_endpoint.py
index c29a6a8..de7eaa4 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/task_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,21 +15,19 @@
# specific language governing permissions and limitations
# under the License.
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at: https://github.com/apache/airflow/issues/8138
+
+
+def get_task():
+ """
+ Get simplified representation of a task.
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_tasks():
+ """
+ Get tasks for DAG
+ """
+ raise NotImplementedError("Not implemented yet.")
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/task_instance_endpoint.py
similarity index 54%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/task_instance_endpoint.py
index c29a6a8..7f2ecfb 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/task_instance_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,21 +15,33 @@
# specific language governing permissions and limitations
# under the License.
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at: https://github.com/apache/airflow/issues/8132
+
+
+def get_task_instance():
+ """
+ Get a task instance
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_task_instances():
+ """
+ Get list of task instances of DAG.
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_task_instances_batch():
+ """
+ Get list of task instances.
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def post_clear_task_instances():
+ """
+ Clear task instances.
+ """
+ raise NotImplementedError("Not implemented yet.")
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/variable_endpoint.py
similarity index 52%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/variable_endpoint.py
index c29a6a8..40394d5 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/variable_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,21 +15,40 @@
# specific language governing permissions and limitations
# under the License.
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at: https://github.com/apache/airflow/issues/8142
+
+
+def delete_variable():
+ """
+ Delete variable
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_variable():
+ """
+ Get a variables by key
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_variables():
+ """
+ Get all variable values
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def patch_variable():
+ """
+ Update a variable by key
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def post_variables():
+ """
+ Create a variable
+ """
+ raise NotImplementedError("Not implemented yet.")
diff --git a/MANIFEST.in b/airflow/api_connexion/endpoints/xcom_endpoint.py
similarity index 51%
copy from MANIFEST.in
copy to airflow/api_connexion/endpoints/xcom_endpoint.py
index c29a6a8..c67af40 100644
--- a/MANIFEST.in
+++ b/airflow/api_connexion/endpoints/xcom_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,21 +15,40 @@
# specific language governing permissions and limitations
# under the License.
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+# TODO(mik-laj): We have to implement it.
+# Do you want to help? Please look at: sshttps://github.com/apache/airflow/issues/8134
+
+
+def delete_xcom_entry():
+ """
+ Delete an XCom entry
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_xcom_entries():
+ """
+ Get all XCom values
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def get_xcom_entry():
+ """
+ Get an XCom entry
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def patch_xcom_entry():
+ """
+ Update an XCom entry
+ """
+ raise NotImplementedError("Not implemented yet.")
+
+
+def post_xcom_entries():
+ """
+ Create an XCom entry
+ """
+ raise NotImplementedError("Not implemented yet.")
diff --git a/openapi.yaml b/airflow/api_connexion/openapi/v1.yaml
similarity index 94%
rename from openapi.yaml
rename to airflow/api_connexion/openapi/v1.yaml
index d997b1c..a3f895d 100644
--- a/openapi.yaml
+++ b/airflow/api_connexion/openapi/v1.yaml
@@ -38,7 +38,7 @@ paths:
/connections:
get:
summary: Get all connection entries
- operationId: getConnections
+ operationId: airflow.api_connexion.endpoints.connection_endpoint.get_connections
tags: [Connection]
parameters:
- $ref: '#/components/parameters/PageLimit'
@@ -59,7 +59,7 @@ paths:
post:
summary: Create connection entry
- operationId: createConnection
+ operationId: airflow.api_connexion.endpoints.connection_endpoint.post_connection
tags: [Connection]
requestBody:
required: true
@@ -87,7 +87,7 @@ paths:
get:
summary: Get a connection entry
- operationId: getConnection
+ operationId: airflow.api_connexion.endpoints.connection_endpoint.get_connection
tags: [Connection]
responses:
'200':
@@ -105,7 +105,7 @@ paths:
patch:
summary: Update a connection entry
- operationId: updateConnection
+ operationId: airflow.api_connexion.endpoints.connection_endpoint.patch_connection
tags: [Connection]
parameters:
- $ref: '#/components/parameters/UpdateMask'
@@ -134,7 +134,7 @@ paths:
delete:
summary: Delete a connection entry
- operationId: deleteConnection
+ operationId: airflow.api_connexion.endpoints.connection_endpoint.delete_connection
tags: [Connection]
responses:
'204':
@@ -149,7 +149,7 @@ paths:
/dags:
get:
summary: Get all DAGs
- operationId: getDags
+ operationId: airflow.api_connexion.endpoints.dag_endpoint.get_dags
tags: [DAG]
parameters:
- $ref: '#/components/parameters/PageLimit'
@@ -176,7 +176,7 @@ paths:
Presents only information available in database (DAGModel).
If you need detailed information, consider using GET /dags/{dag_id}/detail.
- operationId: getDag
+ operationId: airflow.api_connexion.endpoints.dag_endpoint.get_dag
tags: [DAG]
responses:
'200':
@@ -194,7 +194,7 @@ paths:
patch:
summary: Update a DAG
- operationId: updateDag
+ operationId: airflow.api_connexion.endpoints.dag_endpoint.patch_dag
tags: [DAG]
requestBody:
required: true
@@ -222,7 +222,7 @@ paths:
post:
summary: Clears a set of task instances associated with the DAG for a specified date range.
- operationId: clearTaskInstance
+ operationId: airflow.api_connexion.endpoints.task_instance_endpoint.post_clear_task_instances
tags: [DAG]
requestBody:
description: Parameters of action
@@ -254,7 +254,7 @@ paths:
summary: Get all DAG Runs
description: >
This endpoint allows specifying `~` as the dag_id to retrieve DAG Runs for all DAGs.
- operationId: getDagRuns
+ operationId: airflow.api_connexion.endpoints.dag_run_endpoint.get_dag_runs
tags: [DAGRun]
parameters:
- $ref: '#/components/parameters/PageLimit'
@@ -283,7 +283,7 @@ paths:
description: >
This endpoint is a POST to allow filtering across a large number of DAG IDs, where as a GET it
would run in to maximum HTTP request URL lengthlimits
- operationId: getDagRunsBatch
+ operationId: airflow.api_connexion.endpoints.dag_run_endpoint.get_dag_runs_batch
tags: [DAGRun]
requestBody:
required: true
@@ -314,7 +314,7 @@ paths:
get:
summary: Get a DAG Run
- operationId: getDagRun
+ operationId: airflow.api_connexion.endpoints.dag_run_endpoint.get_dag_run
tags: [DAGRun]
responses:
'200':
@@ -332,7 +332,7 @@ paths:
post:
summary: Trigger a DAG Run
- operationId: createDagRun
+ operationId: airflow.api_connexion.endpoints.dag_run_endpoint.post_dag_run
tags: [DAGRun]
requestBody:
required: true
@@ -358,7 +358,7 @@ paths:
patch:
summary: Update a DAG Run
- operationId: updateDagRun
+ operationId: airflow.api_connexion.endpoints.dag_run_endpoint.patch_dag_run
tags: [DAGRun]
parameters:
- $ref: '#/components/parameters/UpdateMask'
@@ -387,7 +387,7 @@ paths:
delete:
summary: Delete a DAG Run
- operationId: deleteDagRun
+ operationId: airflow.api_connexion.endpoints.dag_run_endpoint.delete_dag_run
tags: [DAGRun]
responses:
'204':
@@ -402,7 +402,7 @@ paths:
/eventLogs:
get:
summary: Get all log entries from event log
- operationId: getEventLog
+ operationId: airflow.api_connexion.endpoints.event_log_endpoint.get_event_logs
tags: [EventLog]
parameters:
- $ref: '#/components/parameters/PageLimit'
@@ -427,7 +427,7 @@ paths:
get:
summary: Get a log entry
- operationId: getEventLogEntry
+ operationId: airflow.api_connexion.endpoints.event_log_endpoint.get_event_log
tags: [EventLog]
responses:
'200':
@@ -446,7 +446,7 @@ paths:
/importErrors:
get:
summary: Get all import errors
- operationId: getImportErrors
+ operationId: airflow.api_connexion.endpoints.import_errror_endpoint.get_import_errors
tags: [ImportError]
parameters:
- $ref: '#/components/parameters/PageLimit'
@@ -471,7 +471,7 @@ paths:
get:
summary: Get an import error
- operationId: getImportError
+ operationId: airflow.api_connexion.endpoints.import_errror_endpoint.get_import_error
tags: [ImportError]
responses:
'200':
@@ -489,7 +489,7 @@ paths:
delete:
summary: Delete an import error
- operationId: deleteImportError
+ operationId: airflow.api_connexion.endpoints.import_errror_endpoint.delete_import_error
tags: [ImportError]
responses:
'204':
@@ -504,7 +504,7 @@ paths:
/pools:
get:
summary: Get all pools
- operationId: getPools
+ operationId: airflow.api_connexion.endpoints.pool_endpoint.get_pools
tags: [Pool]
parameters:
- $ref: '#/components/parameters/PageLimit'
@@ -525,7 +525,7 @@ paths:
post:
summary: Create a pool
- operationId: createPool
+ operationId: airflow.api_connexion.endpoints.pool_endpoint.post_pool
tags: [Pool]
requestBody:
required: true
@@ -553,7 +553,7 @@ paths:
get:
summary: Get a pool
- operationId: getPool
+ operationId: airflow.api_connexion.endpoints.pool_endpoint.get_pool
tags: [Pool]
responses:
'200':
@@ -571,7 +571,7 @@ paths:
patch:
summary: Update a pool
- operationId: updatePool
+ operationId: airflow.api_connexion.endpoints.pool_endpoint.patch_pool
tags: [Pool]
parameters:
- $ref: '#/components/parameters/UpdateMask'
@@ -600,7 +600,7 @@ paths:
delete:
summary: Delete a pool
- operationId: deletePool
+ operationId: airflow.api_connexion.endpoints.pool_endpoint.delete_pool
tags: [Pool]
responses:
'204':
@@ -632,7 +632,7 @@ paths:
description: >
This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve DAG Runs for all DAGs
and DAG Runs.
- operationId: getTaskInstances
+ operationId: airflow.api_connexion.endpoints.task_instance_endpoint.get_task_instances
tags: [TaskInstance]
parameters:
- $ref: '#/components/parameters/PageLimit'
@@ -659,7 +659,7 @@ paths:
get:
summary: Get a task instance
- operationId: getTaskInstance
+ operationId: airflow.api_connexion.endpoints.task_instance_endpoint.get_task_instance
tags: [TaskInstance]
responses:
'200':
@@ -681,7 +681,7 @@ paths:
description: >
This endpoint is a POST to allow filtering across a large number of DAG IDs, where as a GET it
would run in to maximum HTTP request URL lengthlimits
- operationId: getTaskInstancesBatch
+ operationId: airflow.api_connexion.endpoints.task_instance_endpoint.get_task_instances_batch
tags: [TaskInstance]
requestBody:
required: true
@@ -710,7 +710,7 @@ paths:
get:
summary: Get all variables
description: The collection does not contain data. To get data, you must get a single entity.
- operationId: getVariables
+ operationId: airflow.api_connexion.endpoints.variable_endpoint.get_variables
tags: [Variable]
parameters:
- $ref: '#/components/parameters/PageLimit'
@@ -731,7 +731,7 @@ paths:
post:
summary: Create a variable
- operationId: createVariable
+ operationId: airflow.api_connexion.endpoints.variable_endpoint.post_variables
tags: [Variable]
requestBody:
required: true
@@ -759,7 +759,7 @@ paths:
get:
summary: Get a variable by key
- operationId: getVariable
+ operationId: airflow.api_connexion.endpoints.variable_endpoint.get_variable
tags: [Variable]
responses:
'200':
@@ -777,7 +777,7 @@ paths:
patch:
summary: Update a variable by key
- operationId: updateVariable
+ operationId: airflow.api_connexion.endpoints.variable_endpoint.patch_variable
tags: [Variable]
parameters:
- $ref: '#/components/parameters/UpdateMask'
@@ -806,7 +806,7 @@ paths:
delete:
summary: Delete variable
- operationId: deleteVariable
+ operationId: airflow.api_connexion.endpoints.variable_endpoint.delete_variable
tags: [Variable]
responses:
'204':
@@ -829,7 +829,7 @@ paths:
description:
This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCOM entries for
for all DAGs, DAG Runs and task instances.
- operationId: getXComEntry
+ operationId: airflow.api_connexion.endpoints.xcom_endpoint.get_xcom_entries
tags: [XCom]
parameters:
- $ref: '#/components/parameters/PageLimit'
@@ -850,7 +850,7 @@ paths:
post:
summary: Create an XCom entry
- operationId: updateXComEntry
+ operationId: airflow.api_connexion.endpoints.xcom_endpoint.post_xcom_entries
tags: [XCom]
requestBody:
required: true
@@ -881,7 +881,7 @@ paths:
get:
summary: Get an XCom entry
- operationId: getXComValue
+ operationId: airflow.api_connexion.endpoints.xcom_endpoint.get_xcom_entry
tags: [XCom]
responses:
'200':
@@ -899,7 +899,7 @@ paths:
patch:
summary: Update an XCom entry
- operationId: updateXComValue
+ operationId: airflow.api_connexion.endpoints.xcom_endpoint.patch_xcom_entry
tags: [XCom]
parameters:
- $ref: '#/components/parameters/UpdateMask'
@@ -928,7 +928,7 @@ paths:
delete:
summary: Delete an XCom entry
- operationId: deleteXComValue
+ operationId: airflow.api_connexion.endpoints.xcom_endpoint.delete_xcom_entry
tags: [XCom]
responses:
'204':
@@ -949,7 +949,7 @@ paths:
get:
summary: Get extra links for task instance
- operationId: getExtraLinks
+ operationId: airflow.api_connexion.endpoints.extra_link_endpoint.get_extra_links
tags: [TaskInstance]
responses:
'200':
@@ -977,7 +977,7 @@ paths:
get:
summary: Get logs for a task instance
description: Get logs for a specific task instance and its try number
- operationId: getLogs
+ operationId: airflow.api_connexion.endpoints.log_endpoint.get_log
tags: [TaskInstance]
responses:
'200':
@@ -1009,7 +1009,7 @@ paths:
get:
summary: Get a simplified representation of DAG.
- operationId: getDagDetail
+ operationId: airflow.api_connexion.endpoints.dag_endpoint.get_dag_details
description: >
The response contains many DAG attributes, so the response can be large.
If possible, consider using GET /dags/{dag_id}.
@@ -1034,7 +1034,7 @@ paths:
get:
summary: Get tasks for DAG
- operationId: getTasks
+ operationId: airflow.api_connexion.endpoints.task_endpoint.get_tasks
tags: [DAG]
responses:
'200':
@@ -1057,7 +1057,7 @@ paths:
get:
summary: Get simplified representation of a task.
- operationId: getTask
+ operationId: airflow.api_connexion.endpoints.task_endpoint.get_task
tags: [DAG]
responses:
'200':
@@ -1079,7 +1079,7 @@ paths:
get:
summary: Get source code using file token
- operationId: getDagSource
+ operationId: airflow.api_connexion.endpoints.dag_source_endpoint.get_dag_source
tags: [DAG]
responses:
'200':
@@ -1101,7 +1101,7 @@ paths:
/config:
get:
summary: Get current configuration
- operationId: getConfig
+ operationId: airflow.api_connexion.endpoints.config_endpoint.get_config
tags: [Config]
parameters:
- $ref: '#/components/parameters/PageLimit'
@@ -1121,6 +1121,20 @@ paths:
'403':
$ref: '#/components/responses/PermissionDenied'
+ /health:
+ get:
+ summary: Checks if the API works
+ operationId: airflow.api_connexion.endpoints.health_endpoint.get_health
+ tags: [Monitoring]
+ responses:
+ '200':
+ description: It should always return "OK"
+ content:
+ text/plain:
+ schema:
+ type: string
+
+
components:
# Reusable schemas (data models)
schemas:
@@ -2401,6 +2415,7 @@ tags:
- name: DAGRun
- name: EventLog
- name: ImportError
+ - name: Monitoring
- name: Pool
- name: TaskInstance
- name: Variable
diff --git a/airflow/config_templates/airflow_local_settings.py b/airflow/config_templates/airflow_local_settings.py
index 485be51..c91e8ac 100644
--- a/airflow/config_templates/airflow_local_settings.py
+++ b/airflow/config_templates/airflow_local_settings.py
@@ -100,7 +100,7 @@ DEFAULT_LOGGING_CONFIG: Dict[str, Any] = {
'handler': ['console'],
'level': FAB_LOG_LEVEL,
'propagate': True,
- }
+ },
},
'root': {
'handlers': ['console'],
diff --git a/airflow/www/app.py b/airflow/www/app.py
index eb5589f..357ce42 100644
--- a/airflow/www/app.py
+++ b/airflow/www/app.py
@@ -20,9 +20,11 @@ import datetime
import logging
import socket
from datetime import timedelta
+from os import path
from typing import Optional
from urllib.parse import urlparse
+import connexion
import flask
import flask_login
import pendulum
@@ -42,6 +44,8 @@ from airflow.www.static_config import configure_manifest_files
app: Optional[Flask] = None
csrf = CSRFProtect()
+# airflow/www/app.py => airflow/
+ROOT_APP_DIR = path.abspath(path.join(path.dirname(__file__), path.pardir))
log = logging.getLogger(__name__)
@@ -237,9 +241,21 @@ def create_app(config=None, testing=False, app_name="Airflow"):
app.register_error_handler(500, views.show_traceback)
app.register_error_handler(404, views.circles)
+ def init_api_connexion(app: Flask):
+ spec_dir = path.join(ROOT_APP_DIR, 'api_connexion', 'openapi')
+ connexion_app = connexion.App(__name__, specification_dir=spec_dir, skip_error_handlers=True)
+ connexion_app.app = app
+ connexion_app.add_api(
+ specification='v1.yaml',
+ base_path='/api/v1',
+ validate_responses=True,
+ strict_validation=False
+ )
+
init_views(appbuilder)
init_plugin_blueprints(app)
init_error_handlers(app)
+ init_api_connexion(app)
if conf.getboolean('webserver', 'UPDATE_FAB_PERMS'):
security_manager = appbuilder.sm
diff --git a/requirements/requirements-python3.6.txt b/requirements/requirements-python3.6.txt
index 67f2dee..b972467 100644
--- a/requirements/requirements-python3.6.txt
+++ b/requirements/requirements-python3.6.txt
@@ -12,7 +12,7 @@ Flask-OpenID==1.2.5
Flask-SQLAlchemy==2.4.3
Flask-WTF==0.14.3
Flask==1.1.2
-GitPython==3.1.2
+GitPython==3.1.3
HeapDict==1.0.1
JPype1==0.7.5
JayDeBeApi==1.2.1
@@ -37,7 +37,7 @@ adal==1.2.3
aiohttp==3.6.2
alabaster==0.7.12
alembic==1.4.2
-amqp==2.5.2
+amqp==2.6.0
analytics-python==1.2.9
ansiwrap==0.8.4
apipkg==1.5
@@ -45,7 +45,7 @@ apispec==1.3.3
appdirs==1.4.4
argcomplete==1.11.1
asn1crypto==1.3.0
-astroid==2.3.3
+astroid==2.4.1
async-generator==1.10
async-timeout==3.0.1
atlasclient==1.0.0
@@ -72,15 +72,15 @@ beautifulsoup4==4.7.1
billiard==3.6.3.0
black==19.10b0
blinker==1.4
-boto3==1.13.19
+boto3==1.13.20
boto==2.49.0
-botocore==1.16.19
+botocore==1.16.20
bowler==0.8.0
cached-property==1.5.1
cachetools==4.1.0
cassandra-driver==3.20.2
cattrs==1.0.0
-celery==4.4.2
+celery==4.4.3
certifi==2020.4.5.1
cffi==1.14.0
cfgv==3.1.0
@@ -88,10 +88,12 @@ cfn-lint==0.32.1
cgroupspy==0.1.6
chardet==3.0.4
click==6.7
+clickclick==1.2.2
cloudant==2.13.0
cloudpickle==1.4.1
colorama==0.4.3
colorlog==4.0.2
+connexion==2.7.0
contextvars==2.4
coverage==5.1
croniter==0.3.32
@@ -178,7 +180,7 @@ hmsclient==0.1.1
httplib2==0.18.1
humanize==0.5.1
hvac==0.10.3
-identify==1.4.17
+identify==1.4.18
idna-ssl==1.1.0
idna==2.9
ijson==2.6.1
@@ -206,13 +208,13 @@ jsonschema==3.2.0
junit-xml==1.9
jupyter-client==6.1.3
jupyter-core==4.6.3
-kombu==4.6.8
+kombu==4.6.9
kubernetes==11.0.0
lazy-object-proxy==1.4.3
ldap3==2.7
lockfile==0.12.2
marshmallow-enum==1.5.1
-marshmallow-sqlalchemy==0.23.0
+marshmallow-sqlalchemy==0.23.1
marshmallow==2.21.0
mccabe==0.6.1
mock==4.0.2
@@ -239,6 +241,7 @@ nteract-scrapbook==0.4.1
ntlm-auth==1.4.0
numpy==1.18.4
oauthlib==2.1.0
+openapi-spec-validator==0.2.8
oscrypto==1.2.0
packaging==20.4
pandas-gbq==0.13.2
@@ -307,7 +310,7 @@ pytzdata==2019.3
pywinrm==0.4.1
pyzmq==19.0.1
qds-sdk==1.15.2
-redis==3.5.2
+redis==3.5.3
regex==2020.5.14
requests-kerberos==0.12.0
requests-mock==1.8.0
@@ -330,7 +333,7 @@ six==1.15.0
slackclient==2.6.2
smmap==3.0.4
snowballstemmer==2.0.0
-snowflake-connector-python==2.2.6
+snowflake-connector-python==2.2.7
snowflake-sqlalchemy==1.2.3
sortedcontainers==2.1.0
soupsieve==2.0.1
@@ -347,10 +350,11 @@ sphinxcontrib-httpdomain==1.7.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.4
-spython==0.0.82
+spython==0.0.84
sshpubkeys==3.1.0
sshtunnel==0.1.5
statsd==3.3.0
+swagger-ui-bundle==0.0.6
tableauserverclient==0.9
tabulate==0.8.7
tblib==1.6.0
@@ -376,12 +380,12 @@ vertica-python==0.10.4
vine==1.3.0
virtualenv==20.0.21
watchtower==0.7.3
-wcwidth==0.1.9
+wcwidth==0.2.2
websocket-client==0.57.0
-wrapt==1.11.2
+wrapt==1.12.1
xmltodict==0.12.0
yamllint==1.23.0
-yandexcloud==0.39.0
+yandexcloud==0.40.0
yarl==1.4.2
zdesk==2.7.1
zict==2.0.0
diff --git a/requirements/requirements-python3.7.txt b/requirements/requirements-python3.7.txt
index 6340187..ee08143 100644
--- a/requirements/requirements-python3.7.txt
+++ b/requirements/requirements-python3.7.txt
@@ -12,7 +12,7 @@ Flask-OpenID==1.2.5
Flask-SQLAlchemy==2.4.3
Flask-WTF==0.14.3
Flask==1.1.2
-GitPython==3.1.2
+GitPython==3.1.3
HeapDict==1.0.1
JPype1==0.7.5
JayDeBeApi==1.2.1
@@ -37,7 +37,7 @@ adal==1.2.3
aiohttp==3.6.2
alabaster==0.7.12
alembic==1.4.2
-amqp==2.5.2
+amqp==2.6.0
analytics-python==1.2.9
ansiwrap==0.8.4
apipkg==1.5
@@ -72,15 +72,15 @@ beautifulsoup4==4.7.1
billiard==3.6.3.0
black==19.10b0
blinker==1.4
-boto3==1.13.19
+boto3==1.13.20
boto==2.49.0
-botocore==1.16.19
+botocore==1.16.20
bowler==0.8.0
cached-property==1.5.1
cachetools==4.1.0
cassandra-driver==3.20.2
cattrs==1.0.0
-celery==4.4.2
+celery==4.4.3
certifi==2020.4.5.1
cffi==1.14.0
cfgv==3.1.0
@@ -88,10 +88,12 @@ cfn-lint==0.32.1
cgroupspy==0.1.6
chardet==3.0.4
click==6.7
+clickclick==1.2.2
cloudant==2.13.0
cloudpickle==1.4.1
colorama==0.4.3
colorlog==4.0.2
+connexion==2.7.0
coverage==5.1
croniter==0.3.32
cryptography==2.9.2
@@ -177,7 +179,7 @@ hmsclient==0.1.1
httplib2==0.18.1
humanize==0.5.1
hvac==0.10.3
-identify==1.4.17
+identify==1.4.18
idna==2.9
ijson==2.6.1
imagesize==1.2.0
@@ -202,7 +204,7 @@ jsonschema==3.2.0
junit-xml==1.9
jupyter-client==6.1.3
jupyter-core==4.6.3
-kombu==4.6.8
+kombu==4.6.9
kubernetes==11.0.0
lazy-object-proxy==1.4.3
ldap3==2.7
@@ -235,6 +237,7 @@ nteract-scrapbook==0.4.1
ntlm-auth==1.4.0
numpy==1.18.4
oauthlib==2.1.0
+openapi-spec-validator==0.2.8
oscrypto==1.2.0
packaging==20.4
pandas-gbq==0.13.2
@@ -302,7 +305,7 @@ pytzdata==2019.3
pywinrm==0.4.1
pyzmq==19.0.1
qds-sdk==1.15.2
-redis==3.5.2
+redis==3.5.3
regex==2020.5.14
requests-kerberos==0.12.0
requests-mock==1.8.0
@@ -325,7 +328,7 @@ six==1.15.0
slackclient==2.6.2
smmap==3.0.4
snowballstemmer==2.0.0
-snowflake-connector-python==2.2.6
+snowflake-connector-python==2.2.7
snowflake-sqlalchemy==1.2.3
sortedcontainers==2.1.0
soupsieve==2.0.1
@@ -342,10 +345,11 @@ sphinxcontrib-httpdomain==1.7.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.4
-spython==0.0.82
+spython==0.0.84
sshpubkeys==3.1.0
sshtunnel==0.1.5
statsd==3.3.0
+swagger-ui-bundle==0.0.6
tableauserverclient==0.9
tabulate==0.8.7
tblib==1.6.0
@@ -370,12 +374,12 @@ vertica-python==0.10.4
vine==1.3.0
virtualenv==20.0.21
watchtower==0.7.3
-wcwidth==0.1.9
+wcwidth==0.2.2
websocket-client==0.57.0
wrapt==1.12.1
xmltodict==0.12.0
yamllint==1.23.0
-yandexcloud==0.39.0
+yandexcloud==0.40.0
yarl==1.4.2
zdesk==2.7.1
zict==2.0.0
diff --git a/requirements/setup-3.6.md5 b/requirements/setup-3.6.md5
index 2469aed..e975648 100644
--- a/requirements/setup-3.6.md5
+++ b/requirements/setup-3.6.md5
@@ -1 +1 @@
-90faec7dd99759a19a20234cb2c09190 /opt/airflow/setup.py
+a26532c0c3d577d657b5159cf68d15ee /opt/airflow/setup.py
diff --git a/requirements/setup-3.7.md5 b/requirements/setup-3.7.md5
index 2469aed..e975648 100644
--- a/requirements/setup-3.7.md5
+++ b/requirements/setup-3.7.md5
@@ -1 +1 @@
-90faec7dd99759a19a20234cb2c09190 /opt/airflow/setup.py
+a26532c0c3d577d657b5159cf68d15ee /opt/airflow/setup.py
diff --git a/setup.py b/setup.py
index 701b838..7851532 100644
--- a/setup.py
+++ b/setup.py
@@ -192,7 +192,7 @@ azure = [
'azure-batch>=8.0.0',
'azure-cosmos>=3.0.1,<4',
'azure-datalake-store>=0.0.45',
- 'azure-kusto-data>=0.0.43',
+ 'azure-kusto-data>=0.0.43,<0.1',
'azure-mgmt-containerinstance>=1.5.0',
'azure-mgmt-datalake-store>=0.5.0',
'azure-mgmt-resource>=2.2.0',
@@ -684,6 +684,7 @@ INSTALL_REQUIREMENTS = [
'cached_property~=1.5',
'cattrs~=1.0',
'colorlog==4.0.2',
+ 'connexion[swagger-ui,flask]>=2.6.0,<3',
'croniter>=0.3.17, <0.4',
'cryptography>=0.9.3',
'dill>=0.2.2, <0.4',
@@ -741,6 +742,7 @@ def do_setup():
package_data={
'': ['airflow/alembic.ini', "airflow/git_version", "*.ipynb",
"airflow/providers/cncf/kubernetes/example_dags/*.yaml"],
+ 'airflow.api_connexion.openapi': ['*.yaml'],
'airflow.serialization': ["*.json"],
},
include_package_data=True,
diff --git a/MANIFEST.in b/tests/api_connexion/__init__.py
similarity index 59%
copy from MANIFEST.in
copy to tests/api_connexion/__init__.py
index c29a6a8..13a8339 100644
--- a/MANIFEST.in
+++ b/tests/api_connexion/__init__.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -15,22 +14,3 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
diff --git a/MANIFEST.in b/tests/api_connexion/endpoints/__init__.py
similarity index 59%
copy from MANIFEST.in
copy to tests/api_connexion/endpoints/__init__.py
index c29a6a8..13a8339 100644
--- a/MANIFEST.in
+++ b/tests/api_connexion/endpoints/__init__.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -15,22 +14,3 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
diff --git a/MANIFEST.in b/tests/api_connexion/endpoints/test_config_endpoint.py
similarity index 59%
copy from MANIFEST.in
copy to tests/api_connexion/endpoints/test_config_endpoint.py
index c29a6a8..d0c9efb 100644
--- a/MANIFEST.in
+++ b/tests/api_connexion/endpoints/test_config_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -15,22 +14,23 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
+import unittest
+
+import pytest
+
+from airflow.www import app
+
+
+class TestGetConfig(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/config")
+ assert response.status_code == 200
diff --git a/tests/api_connexion/endpoints/test_connection_endpoint.py b/tests/api_connexion/endpoints/test_connection_endpoint.py
new file mode 100644
index 0000000..bb191b3
--- /dev/null
+++ b/tests/api_connexion/endpoints/test_connection_endpoint.py
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import unittest
+
+import pytest
+
+from airflow.www import app
+
+
+class TestConnectionEndpoint(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
+
+
+class TestDeleteConnection(TestConnectionEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.delete("/api/v1/connections/1")
+ assert response.status_code == 200
+
+
+class TestGetConnection(TestConnectionEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/connection/1")
+ assert response.status_code == 200
+
+
+class TestGetConnections(TestConnectionEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/connections/")
+ assert response.status_code == 200
+
+
+class TestPatchConnection(TestConnectionEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.patch("/api/v1/connections/1")
+ assert response.status_code == 200
+
+
+class TestPostConnection(TestConnectionEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.post("/api/v1/connections/")
+ assert response.status_code == 200
diff --git a/tests/api_connexion/endpoints/test_dag_endpoint.py b/tests/api_connexion/endpoints/test_dag_endpoint.py
new file mode 100644
index 0000000..5234d36
--- /dev/null
+++ b/tests/api_connexion/endpoints/test_dag_endpoint.py
@@ -0,0 +1,59 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import unittest
+
+import pytest
+
+from airflow.www import app
+
+
+class TestDagEndpoint(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
+
+
+class TestGetDag(TestDagEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/dag/1/")
+ assert response.status_code == 200
+
+
+class TestGetDagDetails(TestDagEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/dag/TEST_DAG_ID/details")
+ assert response.status_code == 200
+
+
+class TestGetDags(TestDagEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/dags/1")
+ assert response.status_code == 200
+
+
+class TestPatchDag(TestDagEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.patch("/api/v1/dags/1")
+ assert response.status_code == 200
diff --git a/tests/api_connexion/endpoints/test_dag_run_endpoint.py b/tests/api_connexion/endpoints/test_dag_run_endpoint.py
new file mode 100644
index 0000000..90f907d
--- /dev/null
+++ b/tests/api_connexion/endpoints/test_dag_run_endpoint.py
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import unittest
+
+import pytest
+
+from airflow.www import app
+
+
+class TestDagRunEndpoint(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
+
+
+class TestDeleteDagRun(TestDagRunEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+ assert response.status_code == 204
+
+
+class TestGetDagRun(TestDagRunEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+ assert response.status_code == 200
+
+
+class TestGetDagRuns(TestDagRunEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+ assert response.status_code == 200
+
+
+class TestPatchDagRun(TestDagRunEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.patch("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+ assert response.status_code == 200
+
+
+class TestPostDagRun(TestDagRunEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.post("/dags/TEST_DAG_ID/dagRuns")
+ assert response.status_code == 200
diff --git a/tests/api_connexion/endpoints/test_dag_source_endpoint.py b/tests/api_connexion/endpoints/test_dag_source_endpoint.py
new file mode 100644
index 0000000..9b07955
--- /dev/null
+++ b/tests/api_connexion/endpoints/test_dag_source_endpoint.py
@@ -0,0 +1,46 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import unittest
+
+import pytest
+
+from airflow.www import app
+
+
+class TestDagSourceEndpoint(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
+
+
+class TestGetSource(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
+
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/health")
+ assert response.status_code == 200
diff --git a/tests/api_connexion/endpoints/test_event_log_endpoint.py b/tests/api_connexion/endpoints/test_event_log_endpoint.py
new file mode 100644
index 0000000..8cf5408
--- /dev/null
+++ b/tests/api_connexion/endpoints/test_event_log_endpoint.py
@@ -0,0 +1,45 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import unittest
+
+import pytest
+
+from airflow.www import app
+
+
+class TestEventLogEndpoint(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
+
+
+class TestGetEventLog(TestEventLogEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/eventLogs/1")
+ assert response.status_code == 200
+
+
+class TestGetEventLogs(TestEventLogEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/eventLogs")
+ assert response.status_code == 200
diff --git a/MANIFEST.in b/tests/api_connexion/endpoints/test_extra_link_endpoint.py
similarity index 56%
copy from MANIFEST.in
copy to tests/api_connexion/endpoints/test_extra_link_endpoint.py
index c29a6a8..0c61b14 100644
--- a/MANIFEST.in
+++ b/tests/api_connexion/endpoints/test_extra_link_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -15,22 +14,25 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
+import unittest
+
+import pytest
+
+from airflow.www import app
+
+
+class TestGetExtraLinks(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get(
+ "/dags/TEST_DG_ID/dagRuns/TEST_DAG_RUN_ID/taskInstances/TEST_TASK_ID/links"
+ )
+ assert response.status_code == 200
diff --git a/MANIFEST.in b/tests/api_connexion/endpoints/test_health_endpoint.py
similarity index 59%
copy from MANIFEST.in
copy to tests/api_connexion/endpoints/test_health_endpoint.py
index c29a6a8..a0c1e2b 100644
--- a/MANIFEST.in
+++ b/tests/api_connexion/endpoints/test_health_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -15,22 +14,21 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
+import unittest
+
+from airflow.www import app
+
+
+class TestGetHealthTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+ def test_should_response_200_and_ok(self):
+ response = self.client.get("/api/v1/health")
+ assert response.status_code == 200
+ assert response.data == b"OK"
diff --git a/tests/api_connexion/endpoints/test_import_errror_endpoint.py b/tests/api_connexion/endpoints/test_import_errror_endpoint.py
new file mode 100644
index 0000000..92cd896
--- /dev/null
+++ b/tests/api_connexion/endpoints/test_import_errror_endpoint.py
@@ -0,0 +1,52 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import unittest
+
+import pytest
+
+from airflow.www import app
+
+
+class TestImportErrorEndpoint(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
+
+
+class TestDeleteImportError(TestImportErrorEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.delete("/api/v1/importErrors/1")
+ assert response.status_code == 204
+
+
+class TestGetImportError(TestImportErrorEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/importErrors/1")
+ assert response.status_code == 200
+
+
+class TestGetImportErrors(TestImportErrorEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/importErrors")
+ assert response.status_code == 200
diff --git a/MANIFEST.in b/tests/api_connexion/endpoints/test_log_endpoint.py
similarity index 56%
copy from MANIFEST.in
copy to tests/api_connexion/endpoints/test_log_endpoint.py
index c29a6a8..cbc564a 100644
--- a/MANIFEST.in
+++ b/tests/api_connexion/endpoints/test_log_endpoint.py
@@ -1,4 +1,3 @@
-#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -15,22 +14,25 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
+import unittest
+
+import pytest
+
+from airflow.www import app
+
+
+class TestGetLog(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
-include NOTICE
-include LICENSE
-include CHANGELOG.txt
-include README.md
-graft licenses
-graft airflow/www
-graft airflow/www/static
-graft airflow/www/templates
-graft airflow/_vendor/
-include airflow/alembic.ini
-include airflow/git_version
-include airflow/serialization/schema.json
-graft scripts/systemd
-graft scripts/upstart
-graft airflow/config_templates
-recursive-exclude airflow/www/node_modules *
-global-exclude __pycache__ *.pyc
-include airflow/providers/cncf/kubernetes/example_dags/example_spark_kubernetes_operator_spark_pi.yaml
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get(
+ "/dags/TEST_DG_ID/dagRuns/TEST_DAG_RUN_ID/taskInstances/TEST_TASK_ID/logs/3"
+ )
+ assert response.status_code == 200
diff --git a/tests/api_connexion/endpoints/test_pool_endpoint.py b/tests/api_connexion/endpoints/test_pool_endpoint.py
new file mode 100644
index 0000000..d3ed9ea
--- /dev/null
+++ b/tests/api_connexion/endpoints/test_pool_endpoint.py
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import unittest
+
+import pytest
+
+from airflow.www import app
+
+
+class TestPoolEndpoint(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
+
+
+class TestDeletePool(TestPoolEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.delete("/api/v1/pools/TEST_POOL_NAME")
+ assert response.status_code == 204
+
+
+class TestGetPool(TestPoolEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/pools/TEST_POOL_NAME")
+ assert response.status_code == 200
+
+
+class TestGetPools(TestPoolEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/pools")
+ assert response.status_code == 200
+
+
+class TestPatchPool(TestPoolEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.patch("/api/v1/pools/TEST_POOL_NAME")
+ assert response.status_code == 200
+
+
+class TestPostPool(TestPoolEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.post("/api/v1/pool")
+ assert response.status_code == 200
diff --git a/tests/api_connexion/endpoints/test_task_endpoint.py b/tests/api_connexion/endpoints/test_task_endpoint.py
new file mode 100644
index 0000000..ab6b649
--- /dev/null
+++ b/tests/api_connexion/endpoints/test_task_endpoint.py
@@ -0,0 +1,45 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import unittest
+
+import pytest
+
+from airflow.www import app
+
+
+class TestTaskEndpoint(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
+
+
+class TestGetTask(TestTaskEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/dags/TEST_DAG_ID/tasks/TEST_TASK_ID")
+ assert response.status_code == 200
+
+
+class TestGetTasks(TestTaskEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/dags/TEST_DAG_ID/tasks")
+ assert response.status_code == 200
diff --git a/tests/api_connexion/endpoints/test_task_instance_endpoint.py b/tests/api_connexion/endpoints/test_task_instance_endpoint.py
new file mode 100644
index 0000000..0d0df43
--- /dev/null
+++ b/tests/api_connexion/endpoints/test_task_instance_endpoint.py
@@ -0,0 +1,61 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import unittest
+
+import pytest
+
+from airflow.www import app
+
+
+class TestTaskInstanceEndpoint(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
+
+
+class TestGetTaskInstance(TestTaskInstanceEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get(
+ "/api/v1/dags/TEST_DG_ID/dagRuns/TEST_DAG_RUN_ID/taskInstances/TEST_TASK_ID"
+ )
+ assert response.status_code == 200
+
+
+class TestGetTaskInstances(TestTaskInstanceEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/dags/TEST_DAG_ID/taskInstances")
+ assert response.status_code == 200
+
+
+class TestGetTaskInstancesBatch(TestTaskInstanceEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.post("/api/v1/dags/~/taskInstances/list")
+ assert response.status_code == 200
+
+
+class TestPostClearTaskInstances(TestTaskInstanceEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.post("/api/v1/dags/clearTaskInstances")
+ assert response.status_code == 200
diff --git a/tests/api_connexion/endpoints/test_variable_endpoint.py b/tests/api_connexion/endpoints/test_variable_endpoint.py
new file mode 100644
index 0000000..e7253c1
--- /dev/null
+++ b/tests/api_connexion/endpoints/test_variable_endpoint.py
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import unittest
+
+import pytest
+
+from airflow.www import app
+
+
+class TestVariableEndpoint(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
+
+
+class TestDeleteVariable(TestVariableEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.delete("/api/v1/variables/TEST_VARIABLE_KEY")
+ assert response.status_code == 204
+
+
+class TestGetVariable(TestVariableEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/variables/TEST_VARIABLE_KEY")
+ assert response.status_code == 200
+
+
+class TestGetVariables(TestVariableEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get("/api/v1/variables")
+ assert response.status_code == 200
+
+
+class TestPatchVariable(TestVariableEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.patch("/api/v1/variables/TEST_VARIABLE_KEY")
+ assert response.status_code == 200
+
+
+class TestPostVariables(TestVariableEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.post("/api/v1/variables")
+ assert response.status_code == 200
diff --git a/tests/api_connexion/endpoints/test_xcom_endpoint.py b/tests/api_connexion/endpoints/test_xcom_endpoint.py
new file mode 100644
index 0000000..2eabdcd
--- /dev/null
+++ b/tests/api_connexion/endpoints/test_xcom_endpoint.py
@@ -0,0 +1,76 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import unittest
+
+import pytest
+
+from airflow.www import app
+
+
+class TesXComEndpoint(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.app = app.create_app(testing=True) # type:ignore
+
+ def setUp(self) -> None:
+ self.client = self.app.test_client() # type:ignore
+
+
+class TestDeleteXComEntry(TesXComEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.delete(
+ "/dags/TEST_DAG_ID}/taskInstances/TEST_TASK_ID/2005-04-02T21:37:42Z/xcomEntries/XCOM_KEY"
+ )
+ assert response.status_code == 204
+
+
+class TestGetXComEntry(TesXComEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get(
+ "/dags/TEST_DAG_ID}/taskInstances/TEST_TASK_ID/2005-04-02T21:37:42Z/xcomEntries/XCOM_KEY"
+ )
+ assert response.status_code == 200
+
+
+class TestGetXComEntries(TesXComEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.get(
+ "/dags/TEST_DAG_ID}/taskInstances/TEST_TASK_ID/2005-04-02T21:37:42Z/xcomEntries/"
+ )
+ assert response.status_code == 200
+
+
+class TestPatchXComEntry(TesXComEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.patch(
+ "/dags/TEST_DAG_ID}/taskInstances/TEST_TASK_ID/2005-04-02T21:37:42Z/xcomEntries"
+ )
+ assert response.status_code == 200
+
+
+class TestPostXComEntry(TesXComEndpoint):
+ @pytest.mark.skip(reason="Not implemented yet")
+ def test_should_response_200(self):
+ response = self.client.post(
+ "/dags/TEST_DAG_ID}/taskInstances/TEST_TASK_ID/2005-04-02T21:37:42Z/xcomEntries/XCOM_KEY"
+ )
+ assert response.status_code == 200
diff --git a/tests/www/test_app.py b/tests/www/test_app.py
index a16562b..3e46abf 100644
--- a/tests/www/test_app.py
+++ b/tests/www/test_app.py
@@ -28,6 +28,10 @@ from tests.test_utils.config import conf_vars
class TestApp(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls) -> None:
+ from airflow import settings
+ settings.configure_orm()
@conf_vars({
('webserver', 'enable_proxy_fix'): 'True',