Posted to commits@airflow.apache.org by GitBox <gi...@apache.org> on 2020/06/05 09:54:00 UTC

[GitHub] [airflow] ephraimbuddy opened a new pull request #9153: [WIP] add readonly endpoints for dagruns

ephraimbuddy opened a new pull request #9153:
URL: https://github.com/apache/airflow/pull/9153


   ---
   Closes: #8129 
   Make sure to mark the boxes below before creating PR: [x]
   
   - [ ] Description above provides context of the change
   - [ ] Unit tests coverage for changes (not needed for documentation changes)
   - [ ] Target Github ISSUE in description if exists
   - [ ] Commits follow "[How to write a good git commit message](http://chris.beams.io/posts/git-commit/)"
   - [ ] Relevant documentation is updated including usage instructions.
   - [ ] I will engage committers as explained in [Contribution Workflow Example](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#contribution-workflow-example).
   
   ---
   In case of fundamental code change, Airflow Improvement Proposal ([AIP](https://cwiki.apache.org/confluence/display/AIRFLOW/Airflow+Improvements+Proposals)) is needed.
   In case of a new dependency, check compliance with the [ASF 3rd Party License Policy](https://www.apache.org/legal/resolved.html#category-x).
   In case of backwards incompatible changes please leave a note in [UPDATING.md](https://github.com/apache/airflow/blob/master/UPDATING.md).
   Read the [Pull Request Guidelines](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#pull-request-guidelines) for more information.
   


----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] mik-laj commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
mik-laj commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r436576266



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,26 +35,471 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now.isoformat(),
+                'external_trigger': True,
+                'start_date': self.now.isoformat(),
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_handle_limit_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(100)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 100
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 1)
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID0',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=0)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 1
+            }
+        )
+
+    @provide_session
+    def test_handle_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(4)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?offset=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 3)

Review comment:
       ```suggestion
           self.assertEqual(response.json.get('total_entries'), 4)
       ```
   This should always return the total number of items, regardless of pagination.
   <img width="497" alt="Screenshot 2020-06-08 at 11 44 19" src="https://user-images.githubusercontent.com/12058428/84016326-69d82300-a97d-11ea-952c-37c472d3e16f.png">
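
   A minimal sketch of the pattern the reviewer is describing, assuming the query and
   pagination details from the diff above (serialization through the marshmallow schema
   is omitted): count the matching rows before `limit`/`offset` are applied, so
   `total_entries` always reflects the full result set.

   ```python
   from flask import request

   from airflow.models import DagRun
   from airflow.utils.session import provide_session


   @provide_session
   def get_dag_runs(dag_id, session):
       query = session.query(DagRun)
       #  `~` means "all DAGs", as in the diff above.
       if dag_id != '~':
           query = query.filter(DagRun.dag_id == dag_id)

       # Count ALL matching runs before pagination is applied.
       total_entries = query.count()

       offset = int(request.args.get('offset', 0))
       limit = min(int(request.args.get('limit', 100)), 100)
       dag_runs = query.order_by(DagRun.id).offset(offset).limit(limit).all()

       # In the real endpoint these would be dumped through the DAGRun schema.
       return {'dag_runs': dag_runs, 'total_entries': total_entries}
   ```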
   







[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r437881981



##########
File path: airflow/api_connexion/endpoints/dag_run_endpoint.py
##########
@@ -26,18 +34,100 @@ def delete_dag_run():
     raise NotImplementedError("Not implemented yet.")
 
 
-def get_dag_run():
+@provide_session
+def get_dag_run(dag_id, dag_run_id, session):
     """
     Get a DAG Run.
     """
-    raise NotImplementedError("Not implemented yet.")
+    query = session.query(DagRun)
+    query = query.filter(DagRun.dag_id == dag_id)
+    query = query.filter(DagRun.run_id == dag_run_id)
+    dag_run = query.one_or_none()
+    if dag_run is None:
+        raise NotFound("DAGRun not found")
+    return dagrun_schema.dump(dag_run)
 
 
-def get_dag_runs():
+@provide_session
+def get_dag_runs(dag_id, session):
     """
     Get all DAG Runs.
     """
-    raise NotImplementedError("Not implemented yet.")
+    offset = request.args.get(parameters.page_offset, 0)
+    limit = min(int(request.args.get(parameters.page_limit, 100)), 100)
+
+    start_date_gte = parse_datetime_in_query(

Review comment:
       I will be using webargs. Connexion no longer validates the date format because of the license of one of its libraries.
   https://github.com/zalando/connexion/issues/476
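
   A hedged sketch of validating the datetime query parameters with webargs, as mentioned
   here; the field names mirror the filters in the diff, but the decorator options and the
   exact shape of the final code in the PR are assumptions.

   ```python
   from webargs import fields
   from webargs.flaskparser import use_kwargs

   dagrun_filter_args = {
       "start_date_gte": fields.DateTime(),
       "start_date_lte": fields.DateTime(),
       "execution_date_gte": fields.DateTime(),
       "execution_date_lte": fields.DateTime(),
       "end_date_gte": fields.DateTime(),
       "end_date_lte": fields.DateTime(),
   }


   # webargs >= 6 uses location="query"; older releases use locations=("query",).
   @use_kwargs(dagrun_filter_args, location="query")
   def get_dag_runs(dag_id, **filters):
       # `filters` contains only the parameters that were actually supplied,
       # already parsed into datetime objects, so no manual date validation is needed.
       ...
   ```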







[GitHub] [airflow] mik-laj commented on a change in pull request #9153: add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
mik-laj commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r439078448



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,335 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.default_time = '2020-06-11T18:00:00+00:00'
+        self.default_time_2 = '2020-06-12T18:00:00+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running', extra_dag=False):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+            state=state,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time_2),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        if extra_dag:
+            dagrun_extra = [DagRun(
+                dag_id='TEST_DAG_ID_' + str(i),
+                run_id='TEST_DAG_RUN_ID_' + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time_2),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            ) for i in range(3, 5)]
+            return [dagrun_model_1, dagrun_model_2] + dagrun_extra
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.default_time,
+                'external_trigger': True,
+                'start_date': self.default_time,
+                'conf': {},
+            },
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {'detail': None, 'status': 404, 'title': 'DAGRun not found', 'type': 'about:blank'}, response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time_2,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run(extra_dag=True)
+        expected_dag_run_ids = ['TEST_DAG_ID', 'TEST_DAG_ID',
+                                "TEST_DAG_ID_3", "TEST_DAG_ID_4"]
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        dag_run_ids = [dag_run["dag_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+
+class TestGetDagRunsPagination(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
+                [
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
+                [
+                    "TEST_DAG_RUN_ID1",
+                    "TEST_DAG_RUN_ID2",
+                    "TEST_DAG_RUN_ID3",
+                    "TEST_DAG_RUN_ID4",
+                    "TEST_DAG_RUN_ID5",
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2", ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],),
+        ]
+    )
+    @provide_session
+    def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
+        dagrun_models = self._create_dag_runs(10)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get(url)
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 10)
+        dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+    @provide_session
+    def test_should_respect_page_size_limit(self, session):
+        dagrun_models = self._create_dag_runs(200)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 200)
+        self.assertEqual(len(response.json["dag_runs"]), 100)
+
+    def _create_dag_runs(self, count):
+        return [
+            DagRun(
+                dag_id="TEST_DAG_ID",
+                run_id="TEST_DAG_RUN_ID" + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time) + timedelta(minutes=i),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            )
+            for i in range(1, count + 1)
+        ]
+
+
+class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte=2020-06-18T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_18", "TEST_START_EXEC_DAY_19"],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_lte=2020-06-11T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_10"],

Review comment:
       I have a day off today, so I have limited options to check it out, but this may help you:
   https://github.com/apache/airflow/blob/master/TESTING.rst#tracking-sql-statements
   If something can be done at the SQL level, it is better to let SQL do it.
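
   One possible reading of that advice, as a hedged sketch: derive the expected run IDs from
   a SQL query that applies the same date filter instead of hard-coding them. The `response`
   object and the threshold value come from the test context above; the ordering and the
   names are assumptions.

   ```python
   from airflow.models import DagRun
   from airflow.utils import timezone
   from airflow.utils.session import create_session

   threshold = timezone.parse("2020-06-11T18:00:00+00:00")
   with create_session() as session:
       # Let the database apply the same filter the endpoint is expected to apply.
       expected_run_ids = [
           run_id
           for (run_id,) in session.query(DagRun.run_id)
           .filter(DagRun.dag_id == "TEST_DAG_ID", DagRun.start_date <= threshold)
           .order_by(DagRun.id)
       ]

   returned_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
   assert returned_run_ids == expected_run_ids
   ```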







[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r438468200



##########
File path: airflow/api_connexion/endpoints/dag_run_endpoint.py
##########
@@ -26,18 +34,66 @@ def delete_dag_run():
     raise NotImplementedError("Not implemented yet.")
 
 
-def get_dag_run():
+@provide_session
+def get_dag_run(dag_id, dag_run_id, session):
     """
     Get a DAG Run.
     """
-    raise NotImplementedError("Not implemented yet.")
+    query = session.query(DagRun)
+    query = query.filter(DagRun.dag_id == dag_id)
+    query = query.filter(DagRun.run_id == dag_run_id)
+    dag_run = query.one_or_none()
+    if dag_run is None:
+        raise NotFound("DAGRun not found")
+    return dagrun_schema.dump(dag_run)
 
 
-def get_dag_runs():
+@provide_session
+def get_dag_runs(dag_id, session):
     """
     Get all DAG Runs.
     """
-    raise NotImplementedError("Not implemented yet.")
+    offset = request.args.get(parameters.page_offset, 0)
+    limit = min(int(request.args.get(parameters.page_limit, 100)), 100)
+    start_date_gte = request.args.get(parameters.filter_start_date_gte, None)
+    start_date_lte = request.args.get(parameters.filter_start_date_lte, None)
+    execution_date_gte = request.args.get(parameters.filter_execution_date_gte, None)
+    execution_date_lte = request.args.get(parameters.filter_execution_date_lte, None)
+    end_date_gte = request.args.get(parameters.filter_end_date_gte, None)
+    end_date_lte = request.args.get(parameters.filter_end_date_lte, None)
+    query = session.query(DagRun)
+
+    #  This endpoint allows specifying ~ as the dag_id to retrieve DAG Runs for all DAGs.
+    if dag_id != '~':
+        query = query.filter(DagRun.dag_id == dag_id)
+
+    # filter start date
+    if start_date_gte:
+        query = query.filter(DagRun.start_date >= timezone.parse(start_date_gte))
+
+    if start_date_lte:
+        query = query.filter(DagRun.start_date <= timezone.parse(start_date_lte))
+
+    # filter execution date
+    if execution_date_gte:
+        query = query.filter(DagRun.execution_date >= timezone.parse(execution_date_gte))
+
+    if execution_date_lte:
+        query = query.filter(DagRun.execution_date <= timezone.parse(execution_date_lte))
+
+    # filter end date
+    if end_date_gte and not end_date_lte:

Review comment:
       ```suggestion
       if end_date_gte:
       ```
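
   In other words, each bound can be applied on its own. A minimal sketch, mirroring the
   start/execution date filters already shown in this diff (not necessarily the final code):

   ```python
   # A request may pass either bound, both, or neither.
   if end_date_gte:
       query = query.filter(DagRun.end_date >= timezone.parse(end_date_gte))
   if end_date_lte:
       query = query.filter(DagRun.end_date <= timezone.parse(end_date_lte))
   ```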







[GitHub] [airflow] mik-laj commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
mik-laj commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r438012011



##########
File path: airflow/api_connexion/endpoints/dag_run_endpoint.py
##########
@@ -26,18 +34,100 @@ def delete_dag_run():
     raise NotImplementedError("Not implemented yet.")
 
 
-def get_dag_run():
+@provide_session
+def get_dag_run(dag_id, dag_run_id, session):
     """
     Get a DAG Run.
     """
-    raise NotImplementedError("Not implemented yet.")
+    query = session.query(DagRun)
+    query = query.filter(DagRun.dag_id == dag_id)
+    query = query.filter(DagRun.run_id == dag_run_id)
+    dag_run = query.one_or_none()
+    if dag_run is None:
+        raise NotFound("DAGRun not found")
+    return dagrun_schema.dump(dag_run)
 
 
-def get_dag_runs():
+@provide_session
+def get_dag_runs(dag_id, session):
     """
     Get all DAG Runs.
     """
-    raise NotImplementedError("Not implemented yet.")
+    offset = request.args.get(parameters.page_offset, 0)
+    limit = min(int(request.args.get(parameters.page_limit, 100)), 100)
+
+    start_date_gte = parse_datetime_in_query(

Review comment:
       I'm glad you did the research. We can change it in a separate change; it works now, so it's great. In later changes we will be able to refactor and remove the repetitive code fragments.
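
   One hypothetical shape such a refactor could take: a single helper that applies all the
   gte/lte range filters, replacing the repeated if-blocks. The helper name and signature are
   illustrative, not code from the PR.

   ```python
   from airflow.models import DagRun
   from airflow.utils import timezone


   def _apply_range_filters(query, ranges):
       """Apply gte/lte filters; `ranges` maps a column to its (gte, lte) values, either may be None."""
       for column, (gte, lte) in ranges.items():
           if gte:
               query = query.filter(column >= timezone.parse(gte))
           if lte:
               query = query.filter(column <= timezone.parse(lte))
       return query


   # Used inside get_dag_runs, with the query and query-string values from the diff above.
   query = _apply_range_filters(query, {
       DagRun.start_date: (start_date_gte, start_date_lte),
       DagRun.execution_date: (execution_date_gte, execution_date_lte),
       DagRun.end_date: (end_date_gte, end_date_lte),
   })
   ```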







[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r439341152



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,335 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.default_time = '2020-06-11T18:00:00+00:00'
+        self.default_time_2 = '2020-06-12T18:00:00+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running', extra_dag=False):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+            state=state,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time_2),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        if extra_dag:
+            dagrun_extra = [DagRun(
+                dag_id='TEST_DAG_ID_' + str(i),
+                run_id='TEST_DAG_RUN_ID_' + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time_2),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            ) for i in range(3, 5)]
+            return [dagrun_model_1, dagrun_model_2] + dagrun_extra
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.default_time,
+                'external_trigger': True,
+                'start_date': self.default_time,
+                'conf': {},
+            },
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {'detail': None, 'status': 404, 'title': 'DAGRun not found', 'type': 'about:blank'}, response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time_2,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run(extra_dag=True)
+        expected_dag_run_ids = ['TEST_DAG_ID', 'TEST_DAG_ID',
+                                "TEST_DAG_ID_3", "TEST_DAG_ID_4"]
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        dag_run_ids = [dag_run["dag_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+
+class TestGetDagRunsPagination(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
+                [
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
+                [
+                    "TEST_DAG_RUN_ID1",
+                    "TEST_DAG_RUN_ID2",
+                    "TEST_DAG_RUN_ID3",
+                    "TEST_DAG_RUN_ID4",
+                    "TEST_DAG_RUN_ID5",
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2", ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],),
+        ]
+    )
+    @provide_session
+    def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
+        dagrun_models = self._create_dag_runs(10)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get(url)
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 10)
+        dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+    @provide_session
+    def test_should_respect_page_size_limit(self, session):
+        dagrun_models = self._create_dag_runs(200)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 200)
+        self.assertEqual(len(response.json["dag_runs"]), 100)
+
+    def _create_dag_runs(self, count):
+        return [
+            DagRun(
+                dag_id="TEST_DAG_ID",
+                run_id="TEST_DAG_RUN_ID" + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time) + timedelta(minutes=i),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            )
+            for i in range(1, count + 1)
+        ]
+
+
+class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte=2020-06-18T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_18", "TEST_START_EXEC_DAY_19"],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_lte=2020-06-11T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_10"],

Review comment:
       This is the command I used to investigate:
   `pytest --trace-sql=num,sql,parameters --capture=no   tests/api_connexion/endpoints/test_dag_run_endpoint.py -k test_date_filters_gte_and_lte_1_api_v1_dags_TEST_DAG_ID_dagRuns_start_date_lte_2020_06_11T18_00_00_00_00`
   
   I set a pdb debugger at this point in dag_run_endpoint:

       if start_date_lte:
           import pdb; pdb.set_trace()
           query = query.filter(DagRun.start_date <= timezone.parse(start_date_lte))
   
   Check image below:
   
   ![start_date_lte](https://user-images.githubusercontent.com/4122866/84494081-c484be00-aca0-11ea-9ec6-4a717cf1d15e.png)
   







[GitHub] [airflow] mik-laj commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
mik-laj commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r438010702



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,347 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = '2020-06-11T18:00:00+00:00'
+        self.now2 = '2020-06-12T18:00:00+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running'):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+            state=state,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now2),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now,
+                'external_trigger': True,
+                'start_date': self.now,
+                'conf': {},
+            },
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {'detail': None, 'status': 404, 'title': 'DAGRun not found', 'type': 'about:blank'}, response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now2,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        assert result[0].dag_id == result[1].dag_id == 'TEST_DAG_ID'
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now2,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+
+class TestGetDagRunsPagination(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
+                [
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
+                [
+                    "TEST_DAG_RUN_ID1",
+                    "TEST_DAG_RUN_ID2",
+                    "TEST_DAG_RUN_ID3",
+                    "TEST_DAG_RUN_ID4",
+                    "TEST_DAG_RUN_ID5",
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2", ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],),
+        ]
+    )
+    @provide_session
+    def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
+        dagrun_models = self._create_dag_runs(10)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get(url)
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 10)
+        dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+    @provide_session
+    def test_should_respect_page_size_limit(self, session):
+        dagrun_models = self._create_dag_runs(200)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 200)
+        self.assertEqual(len(response.json["dag_runs"]), 100)
+
+    def _create_dag_runs(self, count):
+        return [
+            DagRun(
+                dag_id="TEST_DAG_ID",
+                run_id="TEST_DAG_RUN_ID" + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.now) + timedelta(minutes=i),
+                start_date=timezone.parse(self.now),
+                external_trigger=True,
+            )
+            for i in range(1, count + 1)
+        ]
+
+
+class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte=2020-06-18T18:00:00+00:00",
+                ["TEST_DAG_RUN_ID8", "TEST_DAG_RUN_ID9"],

Review comment:
       What do you think about checking the start/execution dates here? The identifiers describe these assertions poorly.
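
   For example, a hedged sketch of asserting on the returned dates themselves instead of on
   run_id strings; `response` and the threshold come from the test case above, and the
   variable names are illustrative.

   ```python
   from airflow.utils import timezone

   threshold = timezone.parse("2020-06-18T18:00:00+00:00")
   returned_start_dates = [
       timezone.parse(dag_run["start_date"]) for dag_run in response.json["dag_runs"]
   ]
   # The filter should match at least one run, and every returned run must satisfy it.
   assert returned_start_dates
   assert all(start_date >= threshold for start_date in returned_start_dates)
   ```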







[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r439122700



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,335 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.default_time = '2020-06-11T18:00:00+00:00'
+        self.default_time_2 = '2020-06-12T18:00:00+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running', extra_dag=False):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+            state=state,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time_2),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        if extra_dag:
+            dagrun_extra = [DagRun(
+                dag_id='TEST_DAG_ID_' + str(i),
+                run_id='TEST_DAG_RUN_ID_' + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time_2),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            ) for i in range(3, 5)]
+            return [dagrun_model_1, dagrun_model_2] + dagrun_extra
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.default_time,
+                'external_trigger': True,
+                'start_date': self.default_time,
+                'conf': {},
+            },
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {'detail': None, 'status': 404, 'title': 'DAGRun not found', 'type': 'about:blank'}, response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time_2,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run(extra_dag=True)
+        expected_dag_run_ids = ['TEST_DAG_ID', 'TEST_DAG_ID',
+                                "TEST_DAG_ID_3", "TEST_DAG_ID_4"]
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        dag_run_ids = [dag_run["dag_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+
+class TestGetDagRunsPagination(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
+                [
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
+                [
+                    "TEST_DAG_RUN_ID1",
+                    "TEST_DAG_RUN_ID2",
+                    "TEST_DAG_RUN_ID3",
+                    "TEST_DAG_RUN_ID4",
+                    "TEST_DAG_RUN_ID5",
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2", ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],),
+        ]
+    )
+    @provide_session
+    def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
+        dagrun_models = self._create_dag_runs(10)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get(url)
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 10)
+        dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+    @provide_session
+    def test_should_respect_page_size_limit(self, session):
+        dagrun_models = self._create_dag_runs(200)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 200)
+        self.assertEqual(len(response.json["dag_runs"]), 100)
+
+    def _create_dag_runs(self, count):
+        return [
+            DagRun(
+                dag_id="TEST_DAG_ID",
+                run_id="TEST_DAG_RUN_ID" + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time) + timedelta(minutes=i),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            )
+            for i in range(1, count + 1)
+        ]
+
+
+class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte=2020-06-18T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_18", "TEST_START_EXEC_DAY_19"],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_lte=2020-06-11T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_10"],

Review comment:
       The fix https://github.com/apache/airflow/pull/9153/commits/7a72935b63bdb6c5622f8c00d4a3d2f996eaa2ae




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r438468343



##########
File path: airflow/api_connexion/endpoints/dag_run_endpoint.py
##########
@@ -26,18 +34,66 @@ def delete_dag_run():
     raise NotImplementedError("Not implemented yet.")
 
 
-def get_dag_run():
+@provide_session
+def get_dag_run(dag_id, dag_run_id, session):
     """
     Get a DAG Run.
     """
-    raise NotImplementedError("Not implemented yet.")
+    query = session.query(DagRun)
+    query = query.filter(DagRun.dag_id == dag_id)
+    query = query.filter(DagRun.run_id == dag_run_id)
+    dag_run = query.one_or_none()
+    if dag_run is None:
+        raise NotFound("DAGRun not found")
+    return dagrun_schema.dump(dag_run)
 
 
-def get_dag_runs():
+@provide_session
+def get_dag_runs(dag_id, session):
     """
     Get all DAG Runs.
     """
-    raise NotImplementedError("Not implemented yet.")
+    offset = request.args.get(parameters.page_offset, 0)
+    limit = min(int(request.args.get(parameters.page_limit, 100)), 100)
+    start_date_gte = request.args.get(parameters.filter_start_date_gte, None)
+    start_date_lte = request.args.get(parameters.filter_start_date_lte, None)
+    execution_date_gte = request.args.get(parameters.filter_execution_date_gte, None)
+    execution_date_lte = request.args.get(parameters.filter_execution_date_lte, None)
+    end_date_gte = request.args.get(parameters.filter_end_date_gte, None)
+    end_date_lte = request.args.get(parameters.filter_end_date_lte, None)
+    query = session.query(DagRun)
+
+    #  This endpoint allows specifying ~ as the dag_id to retrieve DAG Runs for all DAGs.
+    if dag_id != '~':
+        query = query.filter(DagRun.dag_id == dag_id)
+
+    # filter start date
+    if start_date_gte:
+        query = query.filter(DagRun.start_date >= timezone.parse(start_date_gte))
+
+    if start_date_lte:
+        query = query.filter(DagRun.start_date <= timezone.parse(start_date_lte))
+
+    # filter execution date
+    if execution_date_gte:
+        query = query.filter(DagRun.execution_date >= timezone.parse(execution_date_gte))
+
+    if execution_date_lte:
+        query = query.filter(DagRun.execution_date <= timezone.parse(execution_date_lte))
+
+    # filter end date
+    if end_date_gte and not end_date_lte:
+        query = query.filter(DagRun.end_date >= timezone.parse(end_date_gte))
+
+    if end_date_lte and not end_date_gte:

Review comment:
       ```suggestion
       if end_date_lte:
   ```
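
For context, a minimal editorial sketch (not part of the diff) of what the suggested simplification amounts to: each end-date bound is applied on its own, so a request that passes both end_date_gte and end_date_lte gets a closed range instead of having both filters skipped by the `and not` guards.

```python
# Hypothetical helper mirroring the endpoint's filter pattern; the names follow the
# diff above (DagRun, timezone.parse), but the function itself is illustrative only.
from airflow.models import DagRun
from airflow.utils import timezone


def apply_end_date_filters(query, end_date_gte=None, end_date_lte=None):
    """Apply optional lower/upper bounds on DagRun.end_date independently."""
    if end_date_gte:
        query = query.filter(DagRun.end_date >= timezone.parse(end_date_gte))
    if end_date_lte:
        query = query.filter(DagRun.end_date <= timezone.parse(end_date_lte))
    return query
```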




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r439341152



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,335 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.default_time = '2020-06-11T18:00:00+00:00'
+        self.default_time_2 = '2020-06-12T18:00:00+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running', extra_dag=False):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+            state=state,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time_2),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        if extra_dag:
+            dagrun_extra = [DagRun(
+                dag_id='TEST_DAG_ID_' + str(i),
+                run_id='TEST_DAG_RUN_ID_' + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time_2),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            ) for i in range(3, 5)]
+            return [dagrun_model_1, dagrun_model_2] + dagrun_extra
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.default_time,
+                'external_trigger': True,
+                'start_date': self.default_time,
+                'conf': {},
+            },
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {'detail': None, 'status': 404, 'title': 'DAGRun not found', 'type': 'about:blank'}, response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time_2,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run(extra_dag=True)
+        expected_dag_run_ids = ['TEST_DAG_ID', 'TEST_DAG_ID',
+                                "TEST_DAG_ID_3", "TEST_DAG_ID_4"]
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        dag_run_ids = [dag_run["dag_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+
+class TestGetDagRunsPagination(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
+                [
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
+                [
+                    "TEST_DAG_RUN_ID1",
+                    "TEST_DAG_RUN_ID2",
+                    "TEST_DAG_RUN_ID3",
+                    "TEST_DAG_RUN_ID4",
+                    "TEST_DAG_RUN_ID5",
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2", ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],),
+        ]
+    )
+    @provide_session
+    def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
+        dagrun_models = self._create_dag_runs(10)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get(url)
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 10)
+        dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+    @provide_session
+    def test_should_respect_page_size_limit(self, session):
+        dagrun_models = self._create_dag_runs(200)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 200)
+        self.assertEqual(len(response.json["dag_runs"]), 100)
+
+    def _create_dag_runs(self, count):
+        return [
+            DagRun(
+                dag_id="TEST_DAG_ID",
+                run_id="TEST_DAG_RUN_ID" + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time) + timedelta(minutes=i),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            )
+            for i in range(1, count + 1)
+        ]
+
+
+class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte=2020-06-18T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_18", "TEST_START_EXEC_DAY_19"],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_lte=2020-06-11T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_10"],

Review comment:
       This is the command I used to investigate:
   `pytest --trace-sql=num,sql,parameters --capture=no   tests/api_connexion/endpoints/test_dag_run_endpoint.py -k test_date_filters_gte_and_lte_1_api_v1_dags_TEST_DAG_ID_dagRuns_start_date_lte_2020_06_11T18_00_00_00_00`
   
   I set a pdb breakpoint at this point in dag_run_endpoint:
   
   ```python
    if start_date_lte:
        import pdb; pdb.set_trace()
        query = query.filter(DagRun.start_date <= timezone.parse(start_date_lte))
   ```
   
   See the image below:
   
   ![start_date_lte](https://user-images.githubusercontent.com/4122866/84494081-c484be00-aca0-11ea-9ec6-4a717cf1d15e.png)
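
As a minimal editorial sketch of the boundary being inspected here (hypothetical values, not the PR's fixtures): a run whose start_date is on or before the start_date_lte value passes the `<=` filter, while later runs are dropped.

```python
from datetime import timedelta

from airflow.utils import timezone

start_date_lte = timezone.parse("2020-06-11T18:00:00+00:00")

run_on_boundary = start_date_lte                      # started exactly at the bound
run_after_bound = start_date_lte + timedelta(days=7)  # started a week later

assert run_on_boundary <= start_date_lte        # kept by DagRun.start_date <= bound
assert not run_after_bound <= start_date_lte    # filtered out
```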
   




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] mik-laj merged pull request #9153: add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
mik-laj merged pull request #9153:
URL: https://github.com/apache/airflow/pull/9153


   


----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r438048590



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,347 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = '2020-06-11T18:12:50.773601+00:00'
+        self.now2 = '2020-06-12T18:12:50.773601+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running'):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+            state=state
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now2),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now,
+                'external_trigger': True,
+                'start_date': self.now,
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now2,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)

Review comment:
       Check this https://github.com/apache/airflow/pull/9153/commits/c41c7713ef628ab30736ebeaea40fc0a4d4ba310




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r438048272



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,347 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = '2020-06-11T18:00:00+00:00'
+        self.now2 = '2020-06-12T18:00:00+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running'):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+            state=state,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now2),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now,
+                'external_trigger': True,
+                'start_date': self.now,
+                'conf': {},
+            },
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {'detail': None, 'status': 404, 'title': 'DAGRun not found', 'type': 'about:blank'}, response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now2,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        assert result[0].dag_id == result[1].dag_id == 'TEST_DAG_ID'
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now2,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+
+class TestGetDagRunsPagination(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
+                [
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
+                [
+                    "TEST_DAG_RUN_ID1",
+                    "TEST_DAG_RUN_ID2",
+                    "TEST_DAG_RUN_ID3",
+                    "TEST_DAG_RUN_ID4",
+                    "TEST_DAG_RUN_ID5",
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2", ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],),
+        ]
+    )
+    @provide_session
+    def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
+        dagrun_models = self._create_dag_runs(10)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get(url)
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 10)
+        dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+    @provide_session
+    def test_should_respect_page_size_limit(self, session):
+        dagrun_models = self._create_dag_runs(200)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 200)
+        self.assertEqual(len(response.json["dag_runs"]), 100)
+
+    def _create_dag_runs(self, count):
+        return [
+            DagRun(
+                dag_id="TEST_DAG_ID",
+                run_id="TEST_DAG_RUN_ID" + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.now) + timedelta(minutes=i),
+                start_date=timezone.parse(self.now),
+                external_trigger=True,
+            )
+            for i in range(1, count + 1)
+        ]
+
+
+class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte=2020-06-18T18:00:00+00:00",
+                ["TEST_DAG_RUN_ID8", "TEST_DAG_RUN_ID9"],

Review comment:
       Check if this is ok https://github.com/apache/airflow/pull/9153/commits/5545d360f9cfabcf76bf44ea3ffac1c6248d8f29




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] turbaszek commented on a change in pull request #9153: add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
turbaszek commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r440917191



##########
File path: airflow/api_connexion/endpoints/dag_run_endpoint.py
##########
@@ -26,18 +31,62 @@ def delete_dag_run():
     raise NotImplementedError("Not implemented yet.")
 
 
-def get_dag_run():
+@provide_session
+def get_dag_run(dag_id, dag_run_id, session):
     """
     Get a DAG Run.
     """
-    raise NotImplementedError("Not implemented yet.")
+    query = session.query(DagRun)
+    query = query.filter(DagRun.dag_id == dag_id)
+    query = query.filter(DagRun.run_id == dag_run_id)
+    dag_run = query.one_or_none()
+    if dag_run is None:
+        raise NotFound("DAGRun not found")
+    return dagrun_schema.dump(dag_run)
 
 
-def get_dag_runs():
+@provide_session
+def get_dag_runs(session, dag_id, start_date_gte=None, start_date_lte=None,
+                 execution_date_gte=None, execution_date_lte=None,
+                 end_date_gte=None, end_date_lte=None, offset=None, limit=None):
     """
     Get all DAG Runs.
     """
-    raise NotImplementedError("Not implemented yet.")
+
+    query = session.query(DagRun)
+
+    #  This endpoint allows specifying ~ as the dag_id to retrieve DAG Runs for all DAGs.
+    if dag_id != '~':
+        query = query.filter(DagRun.dag_id == dag_id)
+
+    # filter start date
+    if start_date_gte:
+        query = query.filter(DagRun.start_date >= conn_parse_datetime(start_date_gte))
+
+    if start_date_lte:
+

Review comment:
       ```suggestion
   ```




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r436292454



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -290,7 +287,209 @@ def test_handle_limit_and_offset_in_query(self, session):
         assert response.status_code == 200
         self.assertEqual(response.json.get('total_entries'), 5)
 
-    #  TODO: add tests for filters
+
+class TestGetDagRunsStartDateFilter(TestDagRunEndpoint):
+
+    @provide_session
+    def test_start_date_gte_and_lte(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,  # today
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now + timedelta(days=3),  # next 3 days
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        start_date_gte = self.now + timedelta(days=1)  # gte tomorrow
+        start_date_lte = self.now + timedelta(days=10)  # lte next 10 days
+
+        response = self.client.get(
+            f"api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte={start_date_gte}"
+            f"&start_date_lte={start_date_lte}"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 1)
+        self.assertEqual(response.json.get('dag_runs')[0].get('start_date'),
+                         (self.now + timedelta(days=3)).isoformat())
+
+    @provide_session
+    def test_only_start_date_gte_provided(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,  # today
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now + timedelta(days=3),  # next 3 days
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        start_date_gte = self.now + timedelta(days=1)  # gte tomorrow
+        response = self.client.get(
+            f"api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte={start_date_gte}"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 1)
+        self.assertEqual(response.json.get('dag_runs')[0].get('start_date'),
+                         (self.now + timedelta(days=3)).isoformat())
+
+    @provide_session
+    def test_only_start_date_lte_provided(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,  # today
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now + timedelta(days=3),  # next 3 days
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        assert result[0].start_date == self.now
+        start_date_lte = self.now + timedelta(days=1)  # lte tomorrow

Review comment:
       If I change this line to today, it fails:
   `start_date_lte = self.now + timedelta(days=1)  # lte tomorrow`
   




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] mik-laj commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
mik-laj commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r437966890



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,347 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = '2020-06-11T18:12:50.773601+00:00'
+        self.now2 = '2020-06-12T18:12:50.773601+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running'):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+            state=state
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now2),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now,
+                'external_trigger': True,
+                'start_date': self.now,
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now2,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)

Review comment:
       ```suggestion
           session.add_all(dagruns)
            dagruns[0].dag_id = "TEST_DAG_ID_2"
   ```

##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,347 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = '2020-06-11T18:12:50.773601+00:00'
+        self.now2 = '2020-06-12T18:12:50.773601+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running'):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+            state=state
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now2),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now,
+                'external_trigger': True,
+                'start_date': self.now,
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now2,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)

Review comment:
       ```suggestion
           session.add_all(dagruns)
            dagruns[0].dag_id = "TEST_DAG_ID_2"
   ```
   Can you check here if two different DAGs can be fetched?
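
As a rough editorial sketch of the check being asked for (the test name is hypothetical, and it assumes the TestDagRunEndpoint fixtures from the diff above; the merged version instead adds an extra_dag flag to the fixture helper):

```python
@provide_session
def test_should_return_runs_for_two_dags_with_tilde(self, session):
    # Give the two fixture runs different dag_ids before persisting them.
    dagruns = self._create_test_dag_run()
    dagruns[0].dag_id = "TEST_DAG_ID_2"
    session.add_all(dagruns)
    session.commit()

    # The `~` dag_id should return runs from both DAGs.
    response = self.client.get("api/v1/dags/~/dagRuns")
    assert response.status_code == 200
    dag_ids = {run["dag_id"] for run in response.json["dag_runs"]}
    assert dag_ids == {"TEST_DAG_ID", "TEST_DAG_ID_2"}
```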




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r436326853



##########
File path: airflow/api_connexion/schemas/enum_schemas.py
##########
@@ -0,0 +1,24 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from marshmallow import fields, validate
+from marshmallow.schema import Schema
+
+
+class DagState(Schema):
+    """DagState schemagit"""
+    state = fields.Str(validate=validate.OneOf(["success", "running", "failed"]))

Review comment:
       I used the idea from the link below to implement this, but I am unsure whether it is the right way to do it.
   https://github.com/marshmallow-code/marshmallow/issues/1470
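
    A minimal sketch (not part of this PR) of how the `validate.OneOf` idea from the linked issue behaves; the printed messages assume marshmallow 3:

    ```python
    from marshmallow import Schema, fields, validate, ValidationError


    class DagState(Schema):
        """Schema accepting only the three DAG run states used above."""
        state = fields.Str(validate=validate.OneOf(["success", "running", "failed"]))


    print(DagState().load({"state": "running"}))  # {'state': 'running'}
    try:
        DagState().load({"state": "queued"})
    except ValidationError as err:
        print(err.messages)  # {'state': ['Must be one of: success, running, failed.']}
    ```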




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] mik-laj commented on pull request #9153: add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
mik-laj commented on pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#issuecomment-643838370


   I would like to review this change once more and look at the filtering by dates, but today I see that there is a merge conflict. If you have a moment, could you do a rebase?


----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r436292454



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -290,7 +287,209 @@ def test_handle_limit_and_offset_in_query(self, session):
         assert response.status_code == 200
         self.assertEqual(response.json.get('total_entries'), 5)
 
-    #  TODO: add tests for filters
+
+class TestGetDagRunsStartDateFilter(TestDagRunEndpoint):
+
+    @provide_session
+    def test_start_date_gte_and_lte(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,  # today
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now + timedelta(days=3),  # next 3 days
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        start_date_gte = self.now + timedelta(days=1)  # gte tomorrow
+        start_date_lte = self.now + timedelta(days=10)  # lte next 10 days
+
+        response = self.client.get(
+            f"api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte={start_date_gte}"
+            f"&start_date_lte={start_date_lte}"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 1)
+        self.assertEqual(response.json.get('dag_runs')[0].get('start_date'),
+                         (self.now + timedelta(days=3)).isoformat())
+
+    @provide_session
+    def test_only_start_date_gte_provided(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,  # today
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now + timedelta(days=3),  # next 3 days
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        start_date_gte = self.now + timedelta(days=1)  # gte tomorrow
+        response = self.client.get(
+            f"api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte={start_date_gte}"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 1)
+        self.assertEqual(response.json.get('dag_runs')[0].get('start_date'),
+                         (self.now + timedelta(days=3)).isoformat())
+
+    @provide_session
+    def test_only_start_date_lte_provided(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,  # today
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now + timedelta(days=3),  # next 3 days
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        assert result[0].start_date == self.now
+        start_date_lte = self.now + timedelta(days=1)  # lte tomorrow

Review comment:
       If I change this line to today, it fails:
   `start_date_lte = self.now + timedelta(days=1)  # lte tomorrow`
   




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] mik-laj commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
mik-laj commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r438010702



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,347 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = '2020-06-11T18:00:00+00:00'
+        self.now2 = '2020-06-12T18:00:00+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running'):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+            state=state,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now2),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now,
+                'external_trigger': True,
+                'start_date': self.now,
+                'conf': {},
+            },
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {'detail': None, 'status': 404, 'title': 'DAGRun not found', 'type': 'about:blank'}, response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now2,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        assert result[0].dag_id == result[1].dag_id == 'TEST_DAG_ID'
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now2,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+
+class TestGetDagRunsPagination(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
+                [
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
+                [
+                    "TEST_DAG_RUN_ID1",
+                    "TEST_DAG_RUN_ID2",
+                    "TEST_DAG_RUN_ID3",
+                    "TEST_DAG_RUN_ID4",
+                    "TEST_DAG_RUN_ID5",
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2", ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],),
+        ]
+    )
+    @provide_session
+    def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
+        dagrun_models = self._create_dag_runs(10)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get(url)
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 10)
+        dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+    @provide_session
+    def test_should_respect_page_size_limit(self, session):
+        dagrun_models = self._create_dag_runs(200)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 200)
+        self.assertEqual(len(response.json["dag_runs"]), 100)
+
+    def _create_dag_runs(self, count):
+        return [
+            DagRun(
+                dag_id="TEST_DAG_ID",
+                run_id="TEST_DAG_RUN_ID" + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.now) + timedelta(minutes=i),
+                start_date=timezone.parse(self.now),
+                external_trigger=True,
+            )
+            for i in range(1, count + 1)
+        ]
+
+
+class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte=2020-06-18T18:00:00+00:00",
+                ["TEST_DAG_RUN_ID8", "TEST_DAG_RUN_ID9"],

Review comment:
       What do you think about asserting the start/execution dates here? Identifiers describe these objects poorly here.
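
    For example, a hypothetical fragment (reusing `response` and an assumed `expected_start_dates` parameter from the test above, not a complete test) could compare the returned dates instead of the run ids:

    ```python
    # Hypothetical: assert on the returned start dates rather than the run ids.
    dag_run_start_dates = [dag_run["start_date"] for dag_run in response.json["dag_runs"]]
    self.assertEqual(dag_run_start_dates, expected_start_dates)
    ```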




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r439122354



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,335 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.default_time = '2020-06-11T18:00:00+00:00'
+        self.default_time_2 = '2020-06-12T18:00:00+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running', extra_dag=False):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+            state=state,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time_2),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        if extra_dag:
+            dagrun_extra = [DagRun(
+                dag_id='TEST_DAG_ID_' + str(i),
+                run_id='TEST_DAG_RUN_ID_' + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time_2),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            ) for i in range(3, 5)]
+            return [dagrun_model_1, dagrun_model_2] + dagrun_extra
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.default_time,
+                'external_trigger': True,
+                'start_date': self.default_time,
+                'conf': {},
+            },
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {'detail': None, 'status': 404, 'title': 'DAGRun not found', 'type': 'about:blank'}, response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time_2,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run(extra_dag=True)
+        expected_dag_run_ids = ['TEST_DAG_ID', 'TEST_DAG_ID',
+                                "TEST_DAG_ID_3", "TEST_DAG_ID_4"]
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        dag_run_ids = [dag_run["dag_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+
+class TestGetDagRunsPagination(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
+                [
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
+                [
+                    "TEST_DAG_RUN_ID1",
+                    "TEST_DAG_RUN_ID2",
+                    "TEST_DAG_RUN_ID3",
+                    "TEST_DAG_RUN_ID4",
+                    "TEST_DAG_RUN_ID5",
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2", ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],),
+        ]
+    )
+    @provide_session
+    def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
+        dagrun_models = self._create_dag_runs(10)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get(url)
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 10)
+        dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+    @provide_session
+    def test_should_respect_page_size_limit(self, session):
+        dagrun_models = self._create_dag_runs(200)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 200)
+        self.assertEqual(len(response.json["dag_runs"]), 100)
+
+    def _create_dag_runs(self, count):
+        return [
+            DagRun(
+                dag_id="TEST_DAG_ID",
+                run_id="TEST_DAG_RUN_ID" + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time) + timedelta(minutes=i),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            )
+            for i in range(1, count + 1)
+        ]
+
+
+class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte=2020-06-18T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_18", "TEST_START_EXEC_DAY_19"],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_lte=2020-06-11T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_10"],

Review comment:
       Thanks for the link, it helped.
   What was happening was that, in the view, dates in the format '2020-06-11T18:00:00+00:00' were turned into '2020-06-11T18:00:00 00:00'; that is, the + was replaced with a space, so only the date part was parsed. I then replaced +00:00 with Z and the problem was solved.
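
    A small self-contained sketch (not from the PR) of the behaviour described above: an unencoded '+' in a query string is decoded as a space, so the timezone offset is lost unless it is percent-encoded or the 'Z' suffix is used instead.

    ```python
    from urllib.parse import parse_qs, quote

    raw = "start_date_gte=2020-06-11T18:00:00+00:00"
    print(parse_qs(raw)["start_date_gte"])      # ['2020-06-11T18:00:00 00:00'] - the '+' became a space

    encoded = "start_date_gte=" + quote("2020-06-11T18:00:00+00:00", safe="")
    print(parse_qs(encoded)["start_date_gte"])  # ['2020-06-11T18:00:00+00:00']

    zulu = "start_date_gte=2020-06-11T18:00:00Z"
    print(parse_qs(zulu)["start_date_gte"])     # ['2020-06-11T18:00:00Z']
    ```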
   




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] mik-laj commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
mik-laj commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r436645575



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,26 +35,471 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now.isoformat(),
+                'external_trigger': True,
+                'start_date': self.now.isoformat(),
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_handle_limit_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(100)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 100
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 1)
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID0',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=0)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 1
+            }
+        )
+
+    @provide_session
+    def test_handle_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(4)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?offset=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 3)

Review comment:
       Yes. A separate query sounds good.
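
    A minimal sketch of the separate-query approach discussed here (hypothetical; `session`, `dag_id`, `offset` and `limit` are assumed to come from the surrounding view function):

    ```python
    from airflow.models import DagRun

    # Count all matching rows first, independently of the paginated page.
    total_entries = session.query(DagRun).filter(DagRun.dag_id == dag_id).count()

    # Then fetch only the requested page.
    dag_runs = (
        session.query(DagRun)
        .filter(DagRun.dag_id == dag_id)
        .order_by(DagRun.id)
        .offset(offset)
        .limit(limit)
        .all()
    )
    ```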

##########
File path: airflow/api_connexion/endpoints/dag_run_endpoint.py
##########
@@ -26,18 +34,100 @@ def delete_dag_run():
     raise NotImplementedError("Not implemented yet.")
 
 
-def get_dag_run():
+@provide_session
+def get_dag_run(dag_id, dag_run_id, session):
     """
     Get a DAG Run.
     """
-    raise NotImplementedError("Not implemented yet.")
+    query = session.query(DagRun)
+    query = query.filter(DagRun.dag_id == dag_id)
+    query = query.filter(DagRun.run_id == dag_run_id)
+    dag_run = query.one_or_none()
+    if dag_run is None:
+        raise NotFound("DAGRun not found")
+    return dagrun_schema.dump(dag_run)
 
 
-def get_dag_runs():
+@provide_session
+def get_dag_runs(dag_id, session):
     """
     Get all DAG Runs.
     """
-    raise NotImplementedError("Not implemented yet.")
+    offset = request.args.get(parameters.page_offset, 0)
+    limit = min(int(request.args.get(parameters.page_limit, 100)), 100)
+
+    start_date_gte = parse_datetime_in_query(
+        request.args.get(parameters.filter_start_date_gte, None)
+    )
+    start_date_lte = parse_datetime_in_query(
+        request.args.get(parameters.filter_start_date_lte, None)
+    )
+
+    execution_date_gte = parse_datetime_in_query(
+        request.args.get(parameters.filter_execution_date_gte, None)
+    )
+    execution_date_lte = parse_datetime_in_query(
+        request.args.get(parameters.filter_execution_date_lte, None)
+    )
+
+    end_date_gte = parse_datetime_in_query(
+        request.args.get(parameters.filter_end_date_gte, None)
+    )
+    end_date_lte = parse_datetime_in_query(
+        request.args.get(parameters.filter_end_date_lte, None)
+    )
+
+    query = session.query(DagRun)
+
+    #  This endpoint allows specifying ~ as the dag_id to retrieve DAG Runs for all DAGs.
+    if dag_id == '~':
+        dag_run = query.all()
+        return dagrun_collection_schema.dump(DAGRunCollection(
+            dag_runs=dag_run,
+            total_entries=len(dag_run))
+        )
+
+    query = query.filter(DagRun.dag_id == dag_id)

Review comment:
       ```suggestion
       if dag_id != '~':
           query = query.filter(DagRun.dag_id == dag_id)
   ```
    We still want to have filters and pagination. ~ is a wildcard: it allows any value, so we should only give up filtering on this field.

##########
File path: airflow/api_connexion/endpoints/dag_run_endpoint.py
##########
@@ -26,18 +34,100 @@ def delete_dag_run():
     raise NotImplementedError("Not implemented yet.")
 
 
-def get_dag_run():
+@provide_session
+def get_dag_run(dag_id, dag_run_id, session):
     """
     Get a DAG Run.
     """
-    raise NotImplementedError("Not implemented yet.")
+    query = session.query(DagRun)
+    query = query.filter(DagRun.dag_id == dag_id)
+    query = query.filter(DagRun.run_id == dag_run_id)
+    dag_run = query.one_or_none()
+    if dag_run is None:
+        raise NotFound("DAGRun not found")
+    return dagrun_schema.dump(dag_run)
 
 
-def get_dag_runs():
+@provide_session
+def get_dag_runs(dag_id, session):
     """
     Get all DAG Runs.
     """
-    raise NotImplementedError("Not implemented yet.")
+    offset = request.args.get(parameters.page_offset, 0)
+    limit = min(int(request.args.get(parameters.page_limit, 100)), 100)
+
+    start_date_gte = parse_datetime_in_query(
+        request.args.get(parameters.filter_start_date_gte, None)
+    )
+    start_date_lte = parse_datetime_in_query(
+        request.args.get(parameters.filter_start_date_lte, None)
+    )
+
+    execution_date_gte = parse_datetime_in_query(
+        request.args.get(parameters.filter_execution_date_gte, None)
+    )
+    execution_date_lte = parse_datetime_in_query(
+        request.args.get(parameters.filter_execution_date_lte, None)
+    )
+
+    end_date_gte = parse_datetime_in_query(
+        request.args.get(parameters.filter_end_date_gte, None)
+    )
+    end_date_lte = parse_datetime_in_query(
+        request.args.get(parameters.filter_end_date_lte, None)
+    )
+
+    query = session.query(DagRun)
+
+    #  This endpoint allows specifying ~ as the dag_id to retrieve DAG Runs for all DAGs.
+    if dag_id == '~':
+        dag_run = query.all()
+        return dagrun_collection_schema.dump(DAGRunCollection(
+            dag_runs=dag_run,
+            total_entries=len(dag_run))
+        )
+
+    query = query.filter(DagRun.dag_id == dag_id)
+
+    # filter start date
+    if start_date_gte and start_date_lte:
+        query = query.filter(DagRun.start_date <= start_date_lte,
+                             DagRun.start_date >= start_date_gte)
+
+    elif start_date_gte and not start_date_lte:
+        query = query.filter(DagRun.start_date >= start_date_gte)
+
+    elif start_date_lte and not start_date_gte:
+        query = query.filter(DagRun.start_date <= start_date_lte)

Review comment:
       ```suggestion
       if start_date_gte:
           query = query.filter(DagRun.start_date >= start_date_gte)
   
       if start_date_lte:
           query = query.filter(DagRun.start_date <= start_date_lte)
   ```
   Is this causing any problems?

##########
File path: airflow/api_connexion/openapi/v1.yaml
##########
@@ -1264,6 +1264,7 @@ components:
           type: string
           format: date-time
           readOnly: True
+          nullable: true

Review comment:
       +1

##########
File path: tests/api_connexion/schemas/test_dag_run_schema.py
##########
@@ -0,0 +1,171 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import unittest
+
+from airflow.api_connexion.schemas.dag_run_schema import (
+    DAGRunCollection, dagrun_collection_schema, dagrun_schema,
+)
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
+from tests.test_utils.db import clear_db_runs
+
+
+class TestDAGRunBase(unittest.TestCase):
+
+    def setUp(self) -> None:
+        clear_db_runs()
+        self.now = timezone.utcnow()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+
+class TestDAGRunSchema(TestDAGRunBase):
+
+    @provide_session
+    def test_serialize(self, session):
+        dagrun_model = DagRun(run_id='my-dag-run',
+                              run_type=DagRunType.MANUAL.value,
+                              execution_date=self.now,
+                              start_date=self.now,
+                              conf='{"start": "stop"}'
+                              )
+        session.add(dagrun_model)
+        session.commit()
+        dagrun_model = session.query(DagRun).first()
+        deserialized_dagrun = dagrun_schema.dump(dagrun_model)
+
+        self.assertEqual(
+            deserialized_dagrun[0],
+            {
+                'dag_id': None,
+                'dag_run_id': 'my-dag-run',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': str(self.now.isoformat()),
+                'external_trigger': True,
+                'start_date': str(self.now.isoformat()),
+                'conf': '{"start": "stop"}'

Review comment:
       Why do we have a string here? The specification defines an object here.
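
    A hypothetical tweak illustrating the point (not necessarily how the PR resolves it): passing `conf` as a dict lets the schema dump it as a JSON object rather than a string.

    ```python
    # Same test setup as above, but conf is a dict instead of a JSON-encoded string.
    dagrun_model = DagRun(
        run_id='my-dag-run',
        run_type=DagRunType.MANUAL.value,
        execution_date=self.now,
        start_date=self.now,
        conf={"start": "stop"},
    )
    ```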

##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,28 +35,555 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now.isoformat(),
+                'external_trigger': True,
+                'start_date': self.now.isoformat(),
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_handle_limit_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(100)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 100
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 100)
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID0',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=0)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 100
+            }
+        )
+
+    @provide_session
+    def test_handle_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(4)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?offset=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 4)
+
+    @provide_session
+    def test_handle_limit_and_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(10)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 10
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=6&offset=5"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 10)

Review comment:
       ```suggestion
   class TestGetDagRunsPagination(TestDagRunEndpoint):
       @parameterized.expand(
           [
               ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
               ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
               (
                   "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
                   [
                       "TEST_DAG_RUN_ID6",
                       "TEST_DAG_RUN_ID7",
                       "TEST_DAG_RUN_ID8",
                       "TEST_DAG_RUN_ID9",
                       "TEST_DAG_RUN_ID10",
                   ],
               ),
               (
                   "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
                   [
                       "TEST_DAG_RUN_ID1",
                       "TEST_DAG_RUN_ID2",
                       "TEST_DAG_RUN_ID3",
                       "TEST_DAG_RUN_ID4",
                       "TEST_DAG_RUN_ID5",
                       "TEST_DAG_RUN_ID6",
                       "TEST_DAG_RUN_ID7",
                       "TEST_DAG_RUN_ID8",
                       "TEST_DAG_RUN_ID9",
                       "TEST_DAG_RUN_ID10",
                   ],
               ),
               ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
               ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
               (
                   "api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2",
                   ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],
               ),
           ]
       )
       @provide_session
       def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
           dagrun_models = self._create_dag_runs(10)
           session.add_all(dagrun_models)
           session.commit()
   
           response = self.client.get(url)
           assert response.status_code == 200
   
           self.assertEqual(response.json["total_entries"], 10)
           dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
           self.assertEqual(dag_run_ids, expected_dag_run_ids)
   
       @provide_session
       def test_should_respect_page_size_limit(self, session):
           dagrun_models = self._create_dag_runs(200)
           session.add_all(dagrun_models)
           session.commit()
   
           response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
           assert response.status_code == 200
   
           self.assertEqual(response.json["total_entries"], 200)
        self.assertEqual(len(response.json["dag_runs"]), 100)
   
       def _create_dag_runs(self, count):
           return [
               DagRun(
                   dag_id="TEST_DAG_ID",
                   run_id="TEST_DAG_RUN_ID" + str(i),
                   run_type=DagRunType.MANUAL.value,
                   execution_date=self.now + timedelta(minutes=i),
                   start_date=self.now,
                   external_trigger=True,
               )
               for i in range(1, count + 1)
           ]
   
   
   class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
   ```
   
    Your code is a bit difficult to read, so I prepared an example to show you some good practices.
    1. If you have many tests for a given feature, you can create a new class for that feature. It is not required that each view has only one test class.
    2. It is worth thinking about what you want to check with a given test. If you want to check pagination, you don't have to check all the fields in the response; checking the object identifiers is enough. On the other hand, if you're writing the first test in a class, it's worth having more assertions to prevent regressions.
    3. It's a good idea to have one test data set, or one data pattern, per test group. This makes the reviewer's work easier, because they don't have to check that you prepared the data correctly. If you want to check the filter ranges (GTE/LTE), you can create 5 objects and then reuse them in the other tests. You can create a new factory method for creating this test data; see the sketch below.
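    For illustration, such a factory method might look like the sketch below. It reuses the `DagRun` fields already used in the tests above; the helper name and the date-based parametrization are mine, not part of the PR.
    ```python
    def _create_dag_runs_with_dates(self, dates):
        # One manually-triggered DagRun per date; run_ids are numbered so that
        # filter tests can assert on identifiers alone.
        return [
            DagRun(
                dag_id="TEST_DAG_ID",
                run_id=f"TEST_DAG_RUN_ID_{i}",
                run_type=DagRunType.MANUAL.value,
                execution_date=date,
                start_date=date,
                external_trigger=True,
            )
            for i, date in enumerate(dates, start=1)
        ]
    ```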
   

##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,28 +35,555 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now.isoformat(),
+                'external_trigger': True,
+                'start_date': self.now.isoformat(),
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_handle_limit_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(100)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 100
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 100)
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID0',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=0)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 100
+            }
+        )
+
+    @provide_session
+    def test_handle_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(4)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?offset=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 4)
+
+    @provide_session
+    def test_handle_limit_and_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(10)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 10
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=6&offset=5"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 10)
+
+    @provide_session
+    def test_start_date_gte_and_lte(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,  # today
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now + timedelta(days=3),  # next 3 days
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        start_date_gte = (self.now + timedelta(days=1)).isoformat()  # gte tomorrow
+        start_date_lte = (self.now + timedelta(days=10)).isoformat()  # lte next 10 days
+
+        response = self.client.get(
+            f"api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte={start_date_gte}"
+            f"&start_date_lte={start_date_lte}"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 2)
+        self.assertEqual(response.json.get('dag_runs')[0].get('start_date'),
+                         (self.now + timedelta(days=3)).isoformat())
+
+    @provide_session
+    def test_only_start_date_gte_provided(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,  # today
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now + timedelta(days=3),  # next 3 days
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        start_date_gte = (self.now + timedelta(days=1)).isoformat()  # gte tomorrow
+        response = self.client.get(
+            f"api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte={start_date_gte}"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 2)
+        self.assertEqual(response.json.get('dag_runs')[0].get('start_date'),
+                         (self.now + timedelta(days=3)).isoformat())
+
+    @provide_session
+    def test_only_start_date_lte_provided(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,  # today
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now + timedelta(days=3),  # next 3 days
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        assert result[0].start_date == self.now
+        start_date_lte = (self.now + timedelta(days=1)).isoformat()  # lte tomorrow
+        response = self.client.get(
+            f"api/v1/dags/TEST_DAG_ID/dagRuns?start_date_lte={start_date_lte}"
+        )
+        assert response.status_code == 200
+
+        self.assertEqual(response.json.get('total_entries'), 2)
+        self.assertEqual(response.json.get('dag_runs')[0].get('start_date'),
+                         self.now.isoformat())  # today
+
+    @provide_session
+    def test_execution_date_gte_and_lte(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,  # today
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(days=3),  # next 3 days,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        execution_date_gte = (self.now + timedelta(days=1)).isoformat()  # gte tomorrow
+        execution_date_lte = (self.now + timedelta(days=10)).isoformat()  # lte next 10 days
+
+        response = self.client.get(
+            f"api/v1/dags/TEST_DAG_ID/dagRuns?execution_date_gte={execution_date_gte}"
+            f"&execution_date_lte={execution_date_lte}"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 2)
+        self.assertEqual(response.json.get('dag_runs')[0].get('execution_date'),
+                         (self.now + timedelta(days=3)).isoformat())
+
+    @provide_session
+    def test_only_execution_date_gte_provided(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,  # today
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(days=3),  # next 3 days
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        execution_date_gte = (self.now + timedelta(days=1)).isoformat()  # gte tomorrow
+        response = self.client.get(
+            f"api/v1/dags/TEST_DAG_ID/dagRuns?execution_date_gte={execution_date_gte}"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 2)
+        self.assertEqual(response.json.get('dag_runs')[0].get('execution_date'),
+                         (self.now + timedelta(days=3)).isoformat())
+
+    @provide_session
+    def test_only_execution_date_lte_provided(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,  # today
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(days=3),  # next 3 days
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        execution_date_lte = (self.now + timedelta(days=1)).isoformat()  # lte tomorrow
+        response = self.client.get(
+            f"api/v1/dags/TEST_DAG_ID/dagRuns?execution_date_lte={execution_date_lte}"
+        )
+        assert response.status_code == 200
+
+        self.assertEqual(response.json.get('total_entries'), 2)
+        self.assertEqual(response.json.get('dag_runs')[0].get('execution_date'),
+                         self.now.isoformat())  # today
+
+    @provide_session
+    def test_end_date_gte_and_lte_in_query(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+            state='success'  # today. The end date will be today
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now + timedelta(days=3),
+            external_trigger=True,
+        )  # state is running so no end date yet
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+
+        end_date_gte = self.now.isoformat()  # gte today
+        end_date_lte = (self.now + timedelta(days=1)).isoformat()  # lte tomorrow
+
+        response = self.client.get(
+            f"api/v1/dags/TEST_DAG_ID/dagRuns?end_date_gte={end_date_gte}"
+            f"&end_date_lte={end_date_lte}"
+        )
         assert response.status_code == 200
 
+        self.assertEqual(response.json.get('total_entries'), 2)
+        self.assertEqual(response.json.get('dag_runs')[0].get('dag_run_id'),
+                         "TEST_DAG_RUN_ID_1")
+
+    @provide_session
+    def test_only_end_date_gte_provided(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+            state='success'  # today. End date is today
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(days=3),
+            start_date=self.now,
+            external_trigger=True,
+        )  # state is running so no end yet
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        end_date_gte = (self.now + timedelta(days=1)).isoformat()  # gte tomorrow
+        response = self.client.get(
+            f"api/v1/dags/TEST_DAG_ID/dagRuns?end_date_gte={end_date_gte}"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 2)
+
+    @provide_session
+    def test_only_end_date_lte_provided(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+            state='failed'  # today. End date is today
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(days=3),
+            start_date=self.now,
+            external_trigger=True,
+        )  # state is running so no end date yet
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        end_date_lte = (self.now + timedelta(days=1)).isoformat()  # lte tomorrow
+        response = self.client.get(
+            f"api/v1/dags/TEST_DAG_ID/dagRuns?end_date_lte={end_date_lte}"
+        )
+        assert response.status_code == 200
+
+        self.assertEqual(response.json.get('total_entries'), 2)

Review comment:
       ```suggestion
           self.assertEqual(response.json['total_entries'], 2)
   ```

##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,28 +35,555 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now.isoformat(),
+                'external_trigger': True,
+                'start_date': self.now.isoformat(),
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_handle_limit_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(100)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 100
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 100)
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID0',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=0)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 100
+            }
+        )
+
+    @provide_session
+    def test_handle_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(4)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?offset=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 4)
+
+    @provide_session
+    def test_handle_limit_and_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(10)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 10
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=6&offset=5"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 10)

Review comment:
       ```suggestion
   class TestGetDagRunsPagination(TestDagRunEndpoint):
       @parameterized.expand(
           [
               ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
               ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
               (
                   "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
                   [
                       "TEST_DAG_RUN_ID6",
                       "TEST_DAG_RUN_ID7",
                       "TEST_DAG_RUN_ID8",
                       "TEST_DAG_RUN_ID9",
                       "TEST_DAG_RUN_ID10",
                   ],
               ),
               (
                   "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
                   [
                       "TEST_DAG_RUN_ID1",
                       "TEST_DAG_RUN_ID2",
                       "TEST_DAG_RUN_ID3",
                       "TEST_DAG_RUN_ID4",
                       "TEST_DAG_RUN_ID5",
                       "TEST_DAG_RUN_ID6",
                       "TEST_DAG_RUN_ID7",
                       "TEST_DAG_RUN_ID8",
                       "TEST_DAG_RUN_ID9",
                       "TEST_DAG_RUN_ID10",
                   ],
               ),
               ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
               ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
               (
                   "api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2",
                   ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],
               ),
           ]
       )
       @provide_session
       def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
           dagrun_models = self._create_dag_runs(10)
           session.add_all(dagrun_models)
           session.commit()
   
           response = self.client.get(url)
           assert response.status_code == 200
   
           self.assertEqual(response.json["total_entries"], 10)
           dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
           self.assertEqual(dag_run_ids, expected_dag_run_ids)
   
       @provide_session
       def test_should_respect_page_size_limit(self, session):
           dagrun_models = self._create_dag_runs(200)
           session.add_all(dagrun_models)
           session.commit()
   
           response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
           assert response.status_code == 200
   
           self.assertEqual(response.json["total_entries"], 200)
        self.assertEqual(len(response.json["dag_runs"]), 100)
   
       def _create_dag_runs(self, count):
           return [
               DagRun(
                   dag_id="TEST_DAG_ID",
                   run_id="TEST_DAG_RUN_ID" + str(i),
                   run_type=DagRunType.MANUAL.value,
                   execution_date=self.now + timedelta(minutes=i),
                   start_date=self.now,
                   external_trigger=True,
               )
               for i in range(1, count + 1)
           ]
   
   
   class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
   ```
   
    Your code is a bit difficult to read, so I prepared an example to show you some good practices.
    1. If you have many tests for a given feature, you can create a new class for that feature. It is not required that each view has only one test class.
    2. It is worth thinking about what you want to check with a given test. If you want to check pagination, you don't have to check all the fields in the response; checking the object identifiers is enough. On the other hand, if you're writing the first test in a class, it's worth having more assertions to prevent regressions.
    3. It's a good idea to have one test data set, or one data pattern, per test group. This makes the reviewer's work easier, because they don't have to check that you prepared the data correctly. If you want to check the filter ranges (GTE/LTE), you can create 5 objects and then reuse them in the other tests. You can create a new factory method for creating this test data.
    4. If you have many similar tests, you can use parameterized to generate them automatically. This means you only need one test for all the filters, but you will have to prepare the test data well. Object identifiers should be reviewer-friendly.
   

##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,28 +35,555 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now.isoformat(),
+                'external_trigger': True,
+                'start_date': self.now.isoformat(),
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_handle_limit_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(100)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 100
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 100)
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID0',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=0)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 100
+            }
+        )
+
+    @provide_session
+    def test_handle_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(4)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?offset=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 4)
+
+    @provide_session
+    def test_handle_limit_and_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(10)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 10
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=6&offset=5"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 10)

Review comment:
       ```suggestion
   class TestGetDagRunsPagination(TestDagRunEndpoint):
       @parameterized.expand(
           [
               ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
               ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
               (
                   "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
                   [
                       "TEST_DAG_RUN_ID6",
                       "TEST_DAG_RUN_ID7",
                       "TEST_DAG_RUN_ID8",
                       "TEST_DAG_RUN_ID9",
                       "TEST_DAG_RUN_ID10",
                   ],
               ),
               (
                   "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
                   [
                       "TEST_DAG_RUN_ID1",
                       "TEST_DAG_RUN_ID2",
                       "TEST_DAG_RUN_ID3",
                       "TEST_DAG_RUN_ID4",
                       "TEST_DAG_RUN_ID5",
                       "TEST_DAG_RUN_ID6",
                       "TEST_DAG_RUN_ID7",
                       "TEST_DAG_RUN_ID8",
                       "TEST_DAG_RUN_ID9",
                       "TEST_DAG_RUN_ID10",
                   ],
               ),
               ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
               ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
               (
                   "api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2",
                   ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],
               ),
           ]
       )
       @provide_session
       def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
           dagrun_models = self._create_dag_runs(10)
           session.add_all(dagrun_models)
           session.commit()
   
           response = self.client.get(url)
           assert response.status_code == 200
   
           self.assertEqual(response.json["total_entries"], 10)
           dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
           self.assertEqual(dag_run_ids, expected_dag_run_ids)
   
       @provide_session
       def test_should_respect_page_size_limit(self, session):
           dagrun_models = self._create_dag_runs(200)
           session.add_all(dagrun_models)
           session.commit()
   
           response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
           assert response.status_code == 200
   
           self.assertEqual(response.json["total_entries"], 200)
        self.assertEqual(len(response.json["dag_runs"]), 100)
   
       def _create_dag_runs(self, count):
           return [
               DagRun(
                   dag_id="TEST_DAG_ID",
                   run_id="TEST_DAG_RUN_ID" + str(i),
                   run_type=DagRunType.MANUAL.value,
                   execution_date=self.now + timedelta(minutes=i),
                   start_date=self.now,
                   external_trigger=True,
               )
               for i in range(1, count + 1)
           ]
   
   
   class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
   ```
   
    Your code is a bit difficult to read, so I prepared an example to show you some good practices.
    1. If you have many tests for a given feature, you can create a new class for that feature. It is not required that each view has only one test class.
    2. It is worth thinking about what you want to check with a given test. If you want to check pagination, you don't have to check all the fields in the response; checking the object identifiers is enough. On the other hand, if you're writing the first test in a class, it's worth having more assertions to prevent regressions.
    3. It's a good idea to have one test data set, or one data pattern, per test group. This makes the reviewer's work easier, because they don't have to check that you prepared the data correctly. If you want to check the filter ranges (GTE/LTE), you can create 5 objects and then reuse them in the other tests. You can create a new factory method for creating this test data.
    4. If you have many similar tests, you can use parameterized to generate them automatically. This means you only need one test for all the filters, but you will have to prepare the test data well. Object identifiers should be reviewer-friendly. This is not required; you can also prepare a separate set of test data for each filter.
   

##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,28 +35,555 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()

Review comment:
    I would prefer to use a fixed date in these tests. That allows us to use plain text strings in the assertions; right now it is very difficult for me to check whether the date format is correct.
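    A minimal sketch of how the test above could look with a fixed date (assuming the same imports as in the test module; the constant name and the specific timestamp are illustrative):
    ```python
    # A fixed timestamp keeps the expected values readable as literal strings.
    DEFAULT_TIME = "2020-06-11T18:00:00+00:00"

    @provide_session
    def test_should_response_200(self, session):
        dagrun_model = DagRun(
            dag_id="TEST_DAG_ID",
            run_id="TEST_DAG_RUN_ID",
            run_type=DagRunType.MANUAL.value,
            execution_date=timezone.parse(DEFAULT_TIME),
            start_date=timezone.parse(DEFAULT_TIME),
            external_trigger=True,
        )
        session.add(dagrun_model)
        session.commit()

        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
        assert response.status_code == 200
        # The expected value is now a plain literal that is easy to verify by eye.
        assert response.json["execution_date"] == DEFAULT_TIME
    ```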

##########
File path: airflow/api_connexion/endpoints/dag_run_endpoint.py
##########
@@ -26,18 +34,100 @@ def delete_dag_run():
     raise NotImplementedError("Not implemented yet.")
 
 
-def get_dag_run():
+@provide_session
+def get_dag_run(dag_id, dag_run_id, session):
     """
     Get a DAG Run.
     """
-    raise NotImplementedError("Not implemented yet.")
+    query = session.query(DagRun)
+    query = query.filter(DagRun.dag_id == dag_id)
+    query = query.filter(DagRun.run_id == dag_run_id)
+    dag_run = query.one_or_none()
+    if dag_run is None:
+        raise NotFound("DAGRun not found")
+    return dagrun_schema.dump(dag_run)
 
 
-def get_dag_runs():
+@provide_session
+def get_dag_runs(dag_id, session):
     """
     Get all DAG Runs.
     """
-    raise NotImplementedError("Not implemented yet.")
+    offset = request.args.get(parameters.page_offset, 0)
+    limit = min(int(request.args.get(parameters.page_limit, 100)), 100)
+
+    start_date_gte = parse_datetime_in_query(

Review comment:
    What do you think about the [webargs](https://webargs.readthedocs.io/en/latest/) library? Would it be helpful to you? 
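    A rough sketch of what that could look like for this endpoint; the field names mirror the query parameters above, and the exact keywords (e.g. `location=` vs `locations=`, `missing=` vs `load_default=`) depend on the installed webargs/marshmallow versions:
    ```python
    from marshmallow import validate
    from webargs import fields
    from webargs.flaskparser import use_kwargs


    @use_kwargs(
        {
            "limit": fields.Int(missing=100, validate=validate.Range(min=1, max=100)),
            "offset": fields.Int(missing=0, validate=validate.Range(min=0)),
        },
        location="query",
    )
    def get_dag_runs(dag_id, limit, offset):
        # limit/offset arrive already parsed and validated, which would replace
        # the manual request.args handling above.
        ...
    ```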
   

##########
File path: airflow/api_connexion/schemas/dag_run_schema.py
##########
@@ -0,0 +1,87 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from typing import List, NamedTuple
+
+from marshmallow import ValidationError, fields
+from marshmallow.schema import Schema
+from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field
+
+from airflow.api_connexion.schemas.enum_schemas import DagState
+from airflow.models.dagrun import DagRun
+
+
+class DAGRunSchema(SQLAlchemySchema):
+    """
+    Schema for DAGRun
+    """
+
+    class Meta:
+        """ Meta """
+        model = DagRun
+
+    run_id = auto_field(dump_to='dag_run_id', load_from='dag_run_id')
+    dag_id = auto_field(dump_only=True)
+    execution_date = auto_field()
+    start_date = auto_field(dump_only=True)
+    end_date = auto_field(dump_only=True)
+    state = fields.Method('get_state', deserialize='load_state')

Review comment:
    It should be defined as a custom field. This allows you to eliminate some hacks from the code.
   ```python
   class DagStateField(fields.String):
       
       def __init__(self, **metadata):
           super().__init__(**metadata)
           self.validators = (
               [validate.OneOf(State.dag_states)] + list(self.validators)
           )
   ```
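
    A possible follow-up (a sketch, not part of this diff): with such a field, the Method-based hack can become a plain declaration.
    ```python
    # replaces: state = fields.Method('get_state', deserialize='load_state')
    state = DagStateField()
    ```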




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] mik-laj commented on a change in pull request #9153: add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
mik-laj commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r439073206



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,335 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.default_time = '2020-06-11T18:00:00+00:00'
+        self.default_time_2 = '2020-06-12T18:00:00+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running', extra_dag=False):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+            state=state,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time_2),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        if extra_dag:
+            dagrun_extra = [DagRun(
+                dag_id='TEST_DAG_ID_' + str(i),
+                run_id='TEST_DAG_RUN_ID_' + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time_2),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            ) for i in range(3, 5)]
+            return [dagrun_model_1, dagrun_model_2] + dagrun_extra
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.default_time,
+                'external_trigger': True,
+                'start_date': self.default_time,
+                'conf': {},
+            },
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {'detail': None, 'status': 404, 'title': 'DAGRun not found', 'type': 'about:blank'}, response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time_2,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run(extra_dag=True)
+        expected_dag_run_ids = ['TEST_DAG_ID', 'TEST_DAG_ID',
+                                "TEST_DAG_ID_3", "TEST_DAG_ID_4"]
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        dag_run_ids = [dag_run["dag_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+
+class TestGetDagRunsPagination(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
+                [
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
+                [
+                    "TEST_DAG_RUN_ID1",
+                    "TEST_DAG_RUN_ID2",
+                    "TEST_DAG_RUN_ID3",
+                    "TEST_DAG_RUN_ID4",
+                    "TEST_DAG_RUN_ID5",
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2", ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],),
+        ]
+    )
+    @provide_session
+    def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
+        dagrun_models = self._create_dag_runs(10)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get(url)
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 10)
+        dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+    @provide_session
+    def test_should_respect_page_size_limit(self, session):
+        dagrun_models = self._create_dag_runs(200)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 200)
+        self.assertEqual(len(response.json["dag_runs"]), 100)
+
+    def _create_dag_runs(self, count):
+        return [
+            DagRun(
+                dag_id="TEST_DAG_ID",
+                run_id="TEST_DAG_RUN_ID" + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time) + timedelta(minutes=i),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            )
+            for i in range(1, count + 1)
+        ]
+
+
+class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte=2020-06-18T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_18", "TEST_START_EXEC_DAY_19"],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_lte=2020-06-11T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_10"],

Review comment:
       ```suggestion
                   ["TEST_START_EXEC_DAY_10", "TEST_START_EXEC_DAY_11"],
   ```
   We have an inclusive filter, so it should probably be included as well. However, this may change in the future.
   
   Related issue: 
   https://github.com/apache/airflow/issues/9237
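
    For reference, a minimal sketch of the inclusive bound being discussed (assumed endpoint code, not quoted from this PR):
    ```python
    # <= keeps a run whose start_date equals start_date_lte exactly,
    # which is why TEST_START_EXEC_DAY_11 belongs in the expected list.
    if start_date_lte:
        query = query.filter(DagRun.start_date <= start_date_lte)
    ```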




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r439122354



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,335 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.default_time = '2020-06-11T18:00:00+00:00'
+        self.default_time_2 = '2020-06-12T18:00:00+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running', extra_dag=False):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+            state=state,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time_2),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        if extra_dag:
+            dagrun_extra = [DagRun(
+                dag_id='TEST_DAG_ID_' + str(i),
+                run_id='TEST_DAG_RUN_ID_' + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time_2),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            ) for i in range(3, 5)]
+            return [dagrun_model_1, dagrun_model_2] + dagrun_extra
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.default_time,
+                'external_trigger': True,
+                'start_date': self.default_time,
+                'conf': {},
+            },
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {'detail': None, 'status': 404, 'title': 'DAGRun not found', 'type': 'about:blank'}, response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time_2,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run(extra_dag=True)
+        expected_dag_run_ids = ['TEST_DAG_ID', 'TEST_DAG_ID',
+                                "TEST_DAG_ID_3", "TEST_DAG_ID_4"]
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        dag_run_ids = [dag_run["dag_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+
+class TestGetDagRunsPagination(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
+                [
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
+                [
+                    "TEST_DAG_RUN_ID1",
+                    "TEST_DAG_RUN_ID2",
+                    "TEST_DAG_RUN_ID3",
+                    "TEST_DAG_RUN_ID4",
+                    "TEST_DAG_RUN_ID5",
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2", ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],),
+        ]
+    )
+    @provide_session
+    def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
+        dagrun_models = self._create_dag_runs(10)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get(url)
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 10)
+        dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+    @provide_session
+    def test_should_respect_page_size_limit(self, session):
+        dagrun_models = self._create_dag_runs(200)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 200)
+        self.assertEqual(len(response.json["dag_runs"]), 100)
+
+    def _create_dag_runs(self, count):
+        return [
+            DagRun(
+                dag_id="TEST_DAG_ID",
+                run_id="TEST_DAG_RUN_ID" + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time) + timedelta(minutes=i),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            )
+            for i in range(1, count + 1)
+        ]
+
+
+class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte=2020-06-18T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_18", "TEST_START_EXEC_DAY_19"],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_lte=2020-06-11T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_10"],

Review comment:
       Thanks for the link, it helped. 
    What was happening was that on the view, dates in this format '2020-06-11T18:00:00+00:00' are changed to '2020-06-11T18:00:00 00:00'. That is, the + is replaced with a space, so when the value is parsed, only the date part is parsed. I then replaced +00:00 with Z and the problem was solved.
    
    Maybe we should open an issue for a custom Connexion datetime format validator.
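
    A small illustration of the issue (a sketch, not from this PR): an unencoded '+' in a query string is decoded as a space, so either percent-encode the timestamp or use the 'Z' suffix.
    ```python
    from urllib.parse import quote

    quote('2020-06-11T18:00:00+00:00', safe='')
    # -> '2020-06-11T18%3A00%3A00%2B00%3A00'

    # Alternatively, send the UTC offset as 'Z':
    url = 'api/v1/dags/TEST_DAG_ID/dagRuns?start_date_lte=2020-06-11T18:00:00Z'
    ```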




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] mik-laj commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
mik-laj commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r438013490



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,347 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = '2020-06-11T18:12:50.773601+00:00'
+        self.now2 = '2020-06-12T18:12:50.773601+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running'):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+            state=state
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now2),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now,
+                'external_trigger': True,
+                'start_date': self.now,
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now2,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)

Review comment:
      ~ means that you can fetch runs for any DAG ID. Your assertion should check that, if you have two DAGs, e.g. DAG A and DAG B, you can fetch both. For now you only have one DAG, which does not allow you to check the behavior of this view.
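
    For illustration (a sketch, not from this diff), the fixture could also create runs for a second DAG id (say TEST_DAG_ID_2, a hypothetical fixture name) so the assertion can check that both come back:
    ```python
    response = self.client.get("api/v1/dags/~/dagRuns")
    dag_ids_in_response = {run["dag_id"] for run in response.json["dag_runs"]}
    assert dag_ids_in_response == {"TEST_DAG_ID", "TEST_DAG_ID_2"}
    ```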




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] mik-laj commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
mik-laj commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r438867090



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,347 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = '2020-06-11T18:12:50.773601+00:00'
+        self.now2 = '2020-06-12T18:12:50.773601+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running'):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+            state=state
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now2),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now,
+                'external_trigger': True,
+                'start_date': self.now,
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now2,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)

Review comment:
       😻




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r436643921



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,26 +35,471 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now.isoformat(),
+                'external_trigger': True,
+                'start_date': self.now.isoformat(),
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_handle_limit_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(100)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 100
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 1)
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID0',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=0)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 1
+            }
+        )
+
+    @provide_session
+    def test_handle_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(4)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?offset=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 3)

Review comment:
      Looks like the offset=1 is what is making it return 3 instead of 4. I have tried different approaches for the schema and total_entries, but it always comes back as 3 instead of 4. I used the pdb debugger and found that the offset is the cause. The query it runs is this:
       
       result2 = session.query(DagRun).offset(1).limit(100).all()
       assert len(result2) == 3
   
    The number of DagRun rows in the db is 4, and it offsets from 1, returning the rest.
    Should I run a separate query that returns the count of all entries and assign it to total_entries?
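
    One way to do that (a sketch, assuming total_entries should reflect the unpaginated count; not part of this diff):
    ```python
    from sqlalchemy import func

    # Count all matching rows before applying offset/limit,
    # then paginate only the rows that are actually returned.
    total_entries = session.query(func.count(DagRun.id)).filter(
        DagRun.dag_id == dag_id
    ).scalar()
    dag_runs = query.offset(offset).limit(limit).all()
    ```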

##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,26 +35,471 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now.isoformat(),
+                'external_trigger': True,
+                'start_date': self.now.isoformat(),
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_handle_limit_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(100)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 100
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 1)
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID0',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=0)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 1
+            }
+        )
+
+    @provide_session
+    def test_handle_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(4)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?offset=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 3)

Review comment:
       Looks like offset=1 is what makes it return 3 instead of 4. I have tried different approaches for the schema and the total_entries definition, but it always comes out as 3 instead of 4. I stepped through with the pdb debugger and found that the offset is the cause. The query it runs is effectively this:
       
       result2 = session.query(DagRun).offset(1).limit(100).all()
       assert len(result2) == 3
   
   There are 4 DagRuns in the db; it skips the first and returns the rest.
   Should I run a separate query that returns all entries and assign that count to total_entries?

##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,26 +35,471 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now.isoformat(),
+                'external_trigger': True,
+                'start_date': self.now.isoformat(),
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_handle_limit_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(100)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 100
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 1)
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID0',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=0)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 1
+            }
+        )
+
+    @provide_session
+    def test_handle_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(4)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?offset=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 3)

Review comment:
       Look what I found when I ran the webserver
   **Without Filter Top**
   ![withoutfiltertop](https://user-images.githubusercontent.com/4122866/84039791-af0a4e00-a999-11ea-9e14-dc183ae48d33.png)
   **With Filter Top**
   ![withfiltertop](https://user-images.githubusercontent.com/4122866/84039923-db25cf00-a999-11ea-922b-87f76770338a.png)
   **Without Filter Bottom**
   ![withoutfilterbottom](https://user-images.githubusercontent.com/4122866/84039990-f1338f80-a999-11ea-9fb1-5d4fdddc3a77.png)
   **With Filter Bottom**
   ![withfilterbottom](https://user-images.githubusercontent.com/4122866/84040041-06102300-a99a-11ea-9df0-8b3cc861df71.png)
   
   

##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,26 +35,471 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now.isoformat(),
+                'external_trigger': True,
+                'start_date': self.now.isoformat(),
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_handle_limit_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(100)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 100
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 1)
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID0',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=0)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 1
+            }
+        )
+
+    @provide_session
+    def test_handle_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(4)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?offset=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 3)

Review comment:
       I think we have been handling total_entries correctly.
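   To make the two options concrete, here is a tiny plain-Python illustration of the difference (no database, just slicing a list; purely illustrative, not endpoint code):
   
       rows = list(range(4))          # stand-in for the 4 DagRuns in the db
       page = rows[1:1 + 100]         # offset=1, limit=100 -> what the query returns
   
       total_entries = len(page)      # current behaviour asserted by the test above
       assert total_entries == 3
   
       total_entries_all = len(rows)  # alternative: a separate count before paginating
       assert total_entries_all == 4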

##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,26 +35,471 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now.isoformat(),
+                'external_trigger': True,
+                'start_date': self.now.isoformat(),
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_handle_limit_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(100)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 100
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 1)
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID0',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=0)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 1
+            }
+        )
+
+    @provide_session
+    def test_handle_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(4)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?offset=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 3)

Review comment:
       Look what I found when I ran the webserver
   
   **Without Filter Top**
   ![withoutfiltertop](https://user-images.githubusercontent.com/4122866/84039791-af0a4e00-a999-11ea-9e14-dc183ae48d33.png)
   
   **With Filter Top**
   ![withfiltertop](https://user-images.githubusercontent.com/4122866/84039923-db25cf00-a999-11ea-922b-87f76770338a.png)
   
   **Without Filter Bottom**
   ![withoutfilterbottom](https://user-images.githubusercontent.com/4122866/84039990-f1338f80-a999-11ea-9fb1-5d4fdddc3a77.png)
   
   **With Filter Bottom**
   ![withfilterbottom](https://user-images.githubusercontent.com/4122866/84040041-06102300-a99a-11ea-9df0-8b3cc861df71.png)
   
   

##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,26 +35,471 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now.isoformat(),
+                'external_trigger': True,
+                'start_date': self.now.isoformat(),
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_handle_limit_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(100)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 100
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 1)
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID0',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=0)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 1
+            }
+        )
+
+    @provide_session
+    def test_handle_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(4)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?offset=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 3)

Review comment:
       Look what I found when I ran the webserver
   
   **Without Filter. Total entries is 56**
   ![withoutfiltertop](https://user-images.githubusercontent.com/4122866/84039791-af0a4e00-a999-11ea-9e14-dc183ae48d33.png)
   
   **With Filter. Total entries is 20**
   ![withfiltertop](https://user-images.githubusercontent.com/4122866/84039923-db25cf00-a999-11ea-922b-87f76770338a.png)
   
   **Without Filter Bottom. Total entries is 56**
   ![withoutfilterbottom](https://user-images.githubusercontent.com/4122866/84039990-f1338f80-a999-11ea-9fb1-5d4fdddc3a77.png)
   
   **With Filter Bottom. Total entries is 20**
   ![withfilterbottom](https://user-images.githubusercontent.com/4122866/84040041-06102300-a99a-11ea-9df0-8b3cc861df71.png)
   
   

##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,26 +35,471 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now.isoformat(),
+                'external_trigger': True,
+                'start_date': self.now.isoformat(),
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_handle_limit_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(100)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 100
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 1)
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID0',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=0)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 1
+            }
+        )
+
+    @provide_session
+    def test_handle_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(4)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?offset=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 3)

Review comment:
       I would really appreciate it if you could take a look at this once more.

##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -29,26 +35,471 @@ def setUpClass(cls) -> None:
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = timezone.utcnow()
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now.isoformat(),
+                'external_trigger': True,
+                'start_date': self.now.isoformat(),
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now,
+            start_date=self.now,
+            external_trigger=True,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=1),
+            start_date=self.now,
+            external_trigger=True,
+        )
+        session.add_all([dagrun_model_1, dagrun_model_2])
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now.isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=1)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_handle_limit_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(100)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 100
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?limit=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 1)
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID0',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': (self.now + timedelta(minutes=0)).isoformat(),
+                        'external_trigger': True,
+                        'start_date': self.now.isoformat(),
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 1
+            }
+        )
+
+    @provide_session
+    def test_handle_offset_in_query(self, session):
+        dagrun_models = [DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID' + str(i),
+            run_type=DagRunType.MANUAL.value,
+            execution_date=self.now + timedelta(minutes=i),
+            start_date=self.now,
+            external_trigger=True,
+        ) for i in range(4)]
+
+        session.add_all(dagrun_models)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+
+        response = self.client.get(
+            "api/v1/dags/TEST_DAG_ID/dagRuns?offset=1"
+        )
+        assert response.status_code == 200
+        self.assertEqual(response.json.get('total_entries'), 3)

Review comment:
       Fixed [5096389](https://github.com/apache/airflow/pull/9153/commits/5096389c1447d8100e70fdab7b593bfd4112aaf9)

##########
File path: tests/api_connexion/schemas/test_dag_run_schema.py
##########
@@ -0,0 +1,171 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import unittest
+
+from airflow.api_connexion.schemas.dag_run_schema import (
+    DAGRunCollection, dagrun_collection_schema, dagrun_schema,
+)
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
+from tests.test_utils.db import clear_db_runs
+
+
+class TestDAGRunBase(unittest.TestCase):
+
+    def setUp(self) -> None:
+        clear_db_runs()
+        self.now = timezone.utcnow()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+
+class TestDAGRunSchema(TestDAGRunBase):
+
+    @provide_session
+    def test_serialize(self, session):
+        dagrun_model = DagRun(run_id='my-dag-run',
+                              run_type=DagRunType.MANUAL.value,
+                              execution_date=self.now,
+                              start_date=self.now,
+                              conf='{"start": "stop"}'
+                              )
+        session.add(dagrun_model)
+        session.commit()
+        dagrun_model = session.query(DagRun).first()
+        deserialized_dagrun = dagrun_schema.dump(dagrun_model)
+
+        self.assertEqual(
+            deserialized_dagrun[0],
+            {
+                'dag_id': None,
+                'dag_run_id': 'my-dag-run',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': str(self.now.isoformat()),
+                'external_trigger': True,
+                'start_date': str(self.now.isoformat()),
+                'conf': '{"start": "stop"}'

Review comment:
       I found that this can solve it for us without any validation code from our side:
   
   `app.add_api("openapi.yaml", strict_validation=True)`
   
   https://connexion.readthedocs.io/en/latest/request.html#parameter-validation
   
   Should I add it?
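
For reference, a generic connexion setup showing where that flag plugs in; the spec file name and directory below are illustrative and not necessarily how Airflow wires its app:

```python
# Minimal connexion app with strict request validation enabled. With
# strict_validation=True, requests carrying unknown or malformed query
# parameters are rejected with a 400 before the view function runs.
import connexion

connexion_app = connexion.App(__name__, specification_dir="openapi/")
connexion_app.add_api("openapi.yaml", strict_validation=True)

if __name__ == "__main__":
    connexion_app.run(port=8080)
```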

##########
File path: airflow/api_connexion/endpoints/dag_run_endpoint.py
##########
@@ -26,18 +34,100 @@ def delete_dag_run():
     raise NotImplementedError("Not implemented yet.")
 
 
-def get_dag_run():
+@provide_session
+def get_dag_run(dag_id, dag_run_id, session):
     """
     Get a DAG Run.
     """
-    raise NotImplementedError("Not implemented yet.")
+    query = session.query(DagRun)
+    query = query.filter(DagRun.dag_id == dag_id)
+    query = query.filter(DagRun.run_id == dag_run_id)
+    dag_run = query.one_or_none()
+    if dag_run is None:
+        raise NotFound("DAGRun not found")
+    return dagrun_schema.dump(dag_run)
 
 
-def get_dag_runs():
+@provide_session
+def get_dag_runs(dag_id, session):
     """
     Get all DAG Runs.
     """
-    raise NotImplementedError("Not implemented yet.")
+    offset = request.args.get(parameters.page_offset, 0)
+    limit = min(int(request.args.get(parameters.page_limit, 100)), 100)
+
+    start_date_gte = parse_datetime_in_query(

Review comment:
       I found that this can solve it for us without any validation code from our side:
   
   app.add_api("openapi.yaml", strict_validation=True)
   
   https://connexion.readthedocs.io/en/latest/request.html#parameter-validation
   
   Should I add it?

##########
File path: airflow/api_connexion/endpoints/dag_run_endpoint.py
##########
@@ -26,18 +34,100 @@ def delete_dag_run():
     raise NotImplementedError("Not implemented yet.")
 
 
-def get_dag_run():
+@provide_session
+def get_dag_run(dag_id, dag_run_id, session):
     """
     Get a DAG Run.
     """
-    raise NotImplementedError("Not implemented yet.")
+    query = session.query(DagRun)
+    query = query.filter(DagRun.dag_id == dag_id)
+    query = query.filter(DagRun.run_id == dag_run_id)
+    dag_run = query.one_or_none()
+    if dag_run is None:
+        raise NotFound("DAGRun not found")
+    return dagrun_schema.dump(dag_run)
 
 
-def get_dag_runs():
+@provide_session
+def get_dag_runs(dag_id, session):
     """
     Get all DAG Runs.
     """
-    raise NotImplementedError("Not implemented yet.")
+    offset = request.args.get(parameters.page_offset, 0)
+    limit = min(int(request.args.get(parameters.page_limit, 100)), 100)
+
+    start_date_gte = parse_datetime_in_query(

Review comment:
       I found that this can solve it for us without any validation code from our side:
   
   app.add_api("openapi.yaml", strict_validation=True)
   
   https://connexion.readthedocs.io/en/latest/request.html#parameter-validation




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r436292265



##########
File path: airflow/api_connexion/endpoints/dag_run_endpoint.py
##########
@@ -73,36 +71,36 @@ def get_dag_runs(dag_id, session):
 
     # filter start date
     if start_date_gte and start_date_lte:
-        query = query.filter(DagRun.start_date <= start_date_lte,
-                             DagRun.start_date >= start_date_gte)
+        query = query.filter(DagRun.start_date <= timezone.parse(start_date_lte),
+                             DagRun.start_date >= timezone.parse(start_date_gte))
 
     elif start_date_gte and not start_date_lte:
-        query = query.filter(DagRun.start_date >= start_date_gte)
+        query = query.filter(DagRun.start_date >= timezone.parse(start_date_gte))
 
     elif start_date_lte and not start_date_gte:
-        query = query.filter(DagRun.start_date <= start_date_lte)
+        query = query.filter(DagRun.start_date <= timezone.parse(start_date_lte))

Review comment:
       Hi @mik-laj, is there another way I could do this without the `<=` operator? It does not seem to work when the times are equal.
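
A possible alternative, sketched against the same `DagRun` filters shown in the diff rather than the code that was eventually merged: SQL's `BETWEEN` is inclusive on both ends, so the two-bound branch can use `between()`; and if a row at the boundary still goes missing, it is worth comparing the bound and the stored `start_date` down to microseconds, since `<=` itself is inclusive.

```python
# Sketch only: inclusive range filtering with BETWEEN; bounds are parsed to
# timezone-aware datetimes first, exactly as the surrounding code does.
if start_date_gte and start_date_lte:
    query = query.filter(
        DagRun.start_date.between(
            timezone.parse(start_date_gte), timezone.parse(start_date_lte)
        )
    )
elif start_date_gte:
    query = query.filter(DagRun.start_date >= timezone.parse(start_date_gte))
elif start_date_lte:
    query = query.filter(DagRun.start_date <= timezone.parse(start_date_lte))
```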

##########
File path: airflow/api_connexion/endpoints/dag_run_endpoint.py
##########
@@ -73,36 +71,36 @@ def get_dag_runs(dag_id, session):
 
     # filter start date
     if start_date_gte and start_date_lte:
-        query = query.filter(DagRun.start_date <= start_date_lte,
-                             DagRun.start_date >= start_date_gte)
+        query = query.filter(DagRun.start_date <= timezone.parse(start_date_lte),
+                             DagRun.start_date >= timezone.parse(start_date_gte))
 
     elif start_date_gte and not start_date_lte:
-        query = query.filter(DagRun.start_date >= start_date_gte)
+        query = query.filter(DagRun.start_date >= timezone.parse(start_date_gte))
 
     elif start_date_lte and not start_date_gte:
-        query = query.filter(DagRun.start_date <= start_date_lte)
+        query = query.filter(DagRun.start_date <= timezone.parse(start_date_lte))

Review comment:
       After reading the Stack Overflow answer below, it looks like this is correct as written, but I don't know why my test fails when I set start_date_lte to be equal to DagRun.start_date. I will ask for your review once I'm done:
   https://stackoverflow.com/questions/51451768/sqlalchemy-querying-with-datetime-columns-to-filter-by-month-day-year/51468737#51468737
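
One plain-Python way to see the boundary pitfall that could explain the failing test (an assumption, since the thread does not pin the cause down): two timestamps that match to the second still compare as strictly ordered once their microseconds differ.

```python
from datetime import datetime, timezone

stored = datetime(2020, 6, 11, 18, 0, 0, 773601, tzinfo=timezone.utc)  # e.g. a utcnow() value
bound = datetime(2020, 6, 11, 18, 0, 0, tzinfo=timezone.utc)           # a parsed query parameter

print(stored <= bound)    # False: the stored microseconds make it strictly greater than the bound
print(stored <= stored)   # True: <= itself is inclusive when the values are truly equal
```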




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r438005670



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,347 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = '2020-06-11T18:12:50.773601+00:00'
+        self.now2 = '2020-06-12T18:12:50.773601+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running'):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+            state=state
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now2),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now,
+                'external_trigger': True,
+                'start_date': self.now,
+                'conf': {},
+            }
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {
+                'detail': None,
+                'status': 404,
+                'title': 'DAGRun not found',
+                'type': 'about:blank'
+            },
+            response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now2,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    }
+                ],
+                "total_entries": 2
+            }
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)

Review comment:
       Please check whether this is OK: https://github.com/apache/airflow/pull/9153/commits/a809933ef16f081cfdf7dcca1bab4d6bfcec39b9




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r436292265



##########
File path: airflow/api_connexion/endpoints/dag_run_endpoint.py
##########
@@ -73,36 +71,36 @@ def get_dag_runs(dag_id, session):
 
     # filter start date
     if start_date_gte and start_date_lte:
-        query = query.filter(DagRun.start_date <= start_date_lte,
-                             DagRun.start_date >= start_date_gte)
+        query = query.filter(DagRun.start_date <= timezone.parse(start_date_lte),
+                             DagRun.start_date >= timezone.parse(start_date_gte))
 
     elif start_date_gte and not start_date_lte:
-        query = query.filter(DagRun.start_date >= start_date_gte)
+        query = query.filter(DagRun.start_date >= timezone.parse(start_date_gte))
 
     elif start_date_lte and not start_date_gte:
-        query = query.filter(DagRun.start_date <= start_date_lte)
+        query = query.filter(DagRun.start_date <= timezone.parse(start_date_lte))

Review comment:
       Hi @mik-laj, is there another way I could do this without the `<=` operator? It does not seem to work when the times are equal.




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r436362002



##########
File path: airflow/api_connexion/endpoints/dag_run_endpoint.py
##########
@@ -73,36 +71,36 @@ def get_dag_runs(dag_id, session):
 
     # filter start date
     if start_date_gte and start_date_lte:
-        query = query.filter(DagRun.start_date <= start_date_lte,
-                             DagRun.start_date >= start_date_gte)
+        query = query.filter(DagRun.start_date <= timezone.parse(start_date_lte),
+                             DagRun.start_date >= timezone.parse(start_date_gte))
 
     elif start_date_gte and not start_date_lte:
-        query = query.filter(DagRun.start_date >= start_date_gte)
+        query = query.filter(DagRun.start_date >= timezone.parse(start_date_gte))
 
     elif start_date_lte and not start_date_gte:
-        query = query.filter(DagRun.start_date <= start_date_lte)
+        query = query.filter(DagRun.start_date <= timezone.parse(start_date_lte))

Review comment:
       After reading the Stack Overflow answer below, it looks like this is correct as written, but I don't know why my test fails when I set start_date_lte to be equal to DagRun.start_date. I will ask for your review once I'm done:
   https://stackoverflow.com/questions/51451768/sqlalchemy-querying-with-datetime-columns-to-filter-by-month-day-year/51468737#51468737




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r436326853



##########
File path: airflow/api_connexion/schemas/enum_schemas.py
##########
@@ -0,0 +1,24 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from marshmallow import fields, validate
+from marshmallow.schema import Schema
+
+
+class DagState(Schema):
+    """DagState schemagit"""
+    state = fields.Str(validate=validate.OneOf(["success", "running", "failed"]))

Review comment:
       I used the idea from the link below to work on this, but I am unsure whether it is the right way to do it:
   https://github.com/marshmallow-code/marshmallow/issues/1470
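
For illustration, this is roughly how the `OneOf` validator in the schema above behaves at load time; the import path comes from the file shown in the diff, and the usage itself is only a sketch:

```python
# Loading a valid state passes; anything outside the allowed set raises a
# ValidationError produced by validate.OneOf.
from marshmallow import ValidationError

from airflow.api_connexion.schemas.enum_schemas import DagState

schema = DagState()
print(schema.load({"state": "running"}))   # {'state': 'running'}
try:
    schema.load({"state": "not-a-state"})
except ValidationError as err:
    print(err.messages)                    # {'state': ['Must be one of: success, running, failed.']}
```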




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] mik-laj commented on a change in pull request #9153: add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
mik-laj commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r439078448



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,335 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.default_time = '2020-06-11T18:00:00+00:00'
+        self.default_time_2 = '2020-06-12T18:00:00+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running', extra_dag=False):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+            state=state,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time_2),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        if extra_dag:
+            dagrun_extra = [DagRun(
+                dag_id='TEST_DAG_ID_' + str(i),
+                run_id='TEST_DAG_RUN_ID_' + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time_2),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            ) for i in range(3, 5)]
+            return [dagrun_model_1, dagrun_model_2] + dagrun_extra
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.default_time,
+                'external_trigger': True,
+                'start_date': self.default_time,
+                'conf': {},
+            },
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {'detail': None, 'status': 404, 'title': 'DAGRun not found', 'type': 'about:blank'}, response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time_2,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run(extra_dag=True)
+        expected_dag_run_ids = ['TEST_DAG_ID', 'TEST_DAG_ID',
+                                "TEST_DAG_ID_3", "TEST_DAG_ID_4"]
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        dag_run_ids = [dag_run["dag_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+
+class TestGetDagRunsPagination(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
+                [
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
+                [
+                    "TEST_DAG_RUN_ID1",
+                    "TEST_DAG_RUN_ID2",
+                    "TEST_DAG_RUN_ID3",
+                    "TEST_DAG_RUN_ID4",
+                    "TEST_DAG_RUN_ID5",
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2", ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],),
+        ]
+    )
+    @provide_session
+    def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
+        dagrun_models = self._create_dag_runs(10)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get(url)
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 10)
+        dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+    @provide_session
+    def test_should_respect_page_size_limit(self, session):
+        dagrun_models = self._create_dag_runs(200)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 200)
+        self.assertEqual(len(response.json["dag_runs"]), 100)
+
+    def _create_dag_runs(self, count):
+        return [
+            DagRun(
+                dag_id="TEST_DAG_ID",
+                run_id="TEST_DAG_RUN_ID" + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time) + timedelta(minutes=i),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            )
+            for i in range(1, count + 1)
+        ]
+
+
+class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte=2020-06-18T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_18", "TEST_START_EXEC_DAY_19"],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_lte=2020-06-11T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_10"],

Review comment:
       I have a day off today, so I have limited options to check it, but this may help you:
   https://github.com/apache/airflow/blob/master/TESTING.rst#tracking-sql-statements
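
Independently of the Airflow helper linked there, a quick generic way to see the SQL that SQLAlchemy emits for these filters is to raise the engine logger's level; the snippet below is a sketch, not part of the PR:

```python
# Enable SQLAlchemy's statement logging: every emitted statement and its bound
# parameters are printed, so the rendered WHERE clause for start_date_gte /
# start_date_lte can be inspected while the failing test runs.
import logging

logging.basicConfig()
logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
```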




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] ephraimbuddy commented on a change in pull request #9153: add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
ephraimbuddy commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r439076522



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,335 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.default_time = '2020-06-11T18:00:00+00:00'
+        self.default_time_2 = '2020-06-12T18:00:00+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running', extra_dag=False):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+            state=state,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time_2),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        if extra_dag:
+            dagrun_extra = [DagRun(
+                dag_id='TEST_DAG_ID_' + str(i),
+                run_id='TEST_DAG_RUN_ID_' + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time_2),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            ) for i in range(3, 5)]
+            return [dagrun_model_1, dagrun_model_2] + dagrun_extra
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.default_time),
+            start_date=timezone.parse(self.default_time),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.default_time,
+                'external_trigger': True,
+                'start_date': self.default_time,
+                'conf': {},
+            },
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {'detail': None, 'status': 404, 'title': 'DAGRun not found', 'type': 'about:blank'}, response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.default_time_2,
+                        'external_trigger': True,
+                        'start_date': self.default_time,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run(extra_dag=True)
+        expected_dag_run_ids = ['TEST_DAG_ID', 'TEST_DAG_ID',
+                                "TEST_DAG_ID_3", "TEST_DAG_ID_4"]
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 4
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        dag_run_ids = [dag_run["dag_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+
+class TestGetDagRunsPagination(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
+                [
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
+                [
+                    "TEST_DAG_RUN_ID1",
+                    "TEST_DAG_RUN_ID2",
+                    "TEST_DAG_RUN_ID3",
+                    "TEST_DAG_RUN_ID4",
+                    "TEST_DAG_RUN_ID5",
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2", ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],),
+        ]
+    )
+    @provide_session
+    def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
+        dagrun_models = self._create_dag_runs(10)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get(url)
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 10)
+        dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+    @provide_session
+    def test_should_respect_page_size_limit(self, session):
+        dagrun_models = self._create_dag_runs(200)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 200)
+        self.assertEqual(len(response.json["dag_runs"]), 100)
+
+    def _create_dag_runs(self, count):
+        return [
+            DagRun(
+                dag_id="TEST_DAG_ID",
+                run_id="TEST_DAG_RUN_ID" + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.default_time) + timedelta(minutes=i),
+                start_date=timezone.parse(self.default_time),
+                external_trigger=True,
+            )
+            for i in range(1, count + 1)
+        ]
+
+
+class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte=2020-06-18T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_18", "TEST_START_EXEC_DAY_19"],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_lte=2020-06-11T18:00:00+00:00",
+                ["TEST_START_EXEC_DAY_10"],

Review comment:
       This seems like a problem with the `<=` operator in SQLAlchemy: it returns only the strictly-less-than values.
   I have also tried splitting the comparison and using the `or_` operator, but it is still not inclusive.
   I am wondering if there is another way I could do it?
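
A low-tech check (a debugging sketch, not a fix) is to render the statement before executing it, confirm that the inclusive operator really reaches the database, and then compare the bound value and the stored `start_date` down to microseconds:

```python
# str() on a SQLAlchemy Query renders the SQL with bind placeholders, so the
# operator actually sent to the database can be verified by eye.
filtered = query.filter(DagRun.start_date <= timezone.parse(start_date_lte))
print(str(filtered))   # ... WHERE dag_run.start_date <= :start_date_1  (placeholder name may vary)
```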




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



[GitHub] [airflow] mik-laj commented on a change in pull request #9153: [WIP] add readonly endpoints for dagruns

Posted by GitBox <gi...@apache.org>.
mik-laj commented on a change in pull request #9153:
URL: https://github.com/apache/airflow/pull/9153#discussion_r438010702



##########
File path: tests/api_connexion/endpoints/test_dag_run_endpoint.py
##########
@@ -15,41 +15,347 @@
 # specific language governing permissions and limitations
 # under the License.
 import unittest
+from datetime import timedelta
 
 import pytest
+from parameterized import parameterized
 
+from airflow.models import DagRun
+from airflow.utils import timezone
+from airflow.utils.session import provide_session
+from airflow.utils.types import DagRunType
 from airflow.www import app
+from tests.test_utils.db import clear_db_runs
 
 
 class TestDagRunEndpoint(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         super().setUpClass()
+
         cls.app = app.create_app(testing=True)  # type:ignore
 
     def setUp(self) -> None:
         self.client = self.app.test_client()  # type:ignore
+        self.now = '2020-06-11T18:00:00+00:00'
+        self.now2 = '2020-06-12T18:00:00+00:00'
+        clear_db_runs()
+
+    def tearDown(self) -> None:
+        clear_db_runs()
+
+    def _create_test_dag_run(self, state='running'):
+        dagrun_model_1 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_1',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+            state=state,
+        )
+        dagrun_model_2 = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID_2',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now2),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        return [dagrun_model_1, dagrun_model_2]
 
 
 class TestDeleteDagRun(TestDagRunEndpoint):
     @pytest.mark.skip(reason="Not implemented yet")
     def test_should_response_200(self):
-        response = self.client.delete("/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
+        response = self.client.delete("api/v1/dags/TEST_DAG_ID}/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 204
 
 
 class TestGetDagRun(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagrun_model = DagRun(
+            dag_id='TEST_DAG_ID',
+            run_id='TEST_DAG_RUN_ID',
+            run_type=DagRunType.MANUAL.value,
+            execution_date=timezone.parse(self.now),
+            start_date=timezone.parse(self.now),
+            external_trigger=True,
+        )
+        session.add(dagrun_model)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 1
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns/TEST_DAG_RUN_ID")
         assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                'dag_id': 'TEST_DAG_ID',
+                'dag_run_id': 'TEST_DAG_RUN_ID',
+                'end_date': None,
+                'state': 'running',
+                'execution_date': self.now,
+                'external_trigger': True,
+                'start_date': self.now,
+                'conf': {},
+            },
+        )
+
+    def test_should_response_404(self):
+        response = self.client.get("api/v1/dags/invalid-id/dagRuns/invalid-id")
+        assert response.status_code == 404
+        self.assertEqual(
+            {'detail': None, 'status': 404, 'title': 'DAGRun not found', 'type': 'about:blank'}, response.json
+        )
 
 
 class TestGetDagRuns(TestDagRunEndpoint):
-    @pytest.mark.skip(reason="Not implemented yet")
-    def test_should_response_200(self):
-        response = self.client.get("/dags/TEST_DAG_ID/dagRuns/")
+    @provide_session
+    def test_should_response_200(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now2,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+    @provide_session
+    def test_should_return_all_with_tilde_as_dag_id(self, session):
+        dagruns = self._create_test_dag_run()
+        session.add_all(dagruns)
+        session.commit()
+        result = session.query(DagRun).all()
+        assert len(result) == 2
+        assert result[0].dag_id == result[1].dag_id == 'TEST_DAG_ID'
+        response = self.client.get("api/v1/dags/~/dagRuns")
+        assert response.status_code == 200
+        self.assertEqual(
+            response.json,
+            {
+                "dag_runs": [
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_1',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                    {
+                        'dag_id': 'TEST_DAG_ID',
+                        'dag_run_id': 'TEST_DAG_RUN_ID_2',
+                        'end_date': None,
+                        'state': 'running',
+                        'execution_date': self.now2,
+                        'external_trigger': True,
+                        'start_date': self.now,
+                        'conf': {},
+                    },
+                ],
+                "total_entries": 2,
+            },
+        )
+
+
+class TestGetDagRunsPagination(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1", ["TEST_DAG_RUN_ID1"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2", ["TEST_DAG_RUN_ID1", "TEST_DAG_RUN_ID2"]),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=5",
+                [
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?offset=0",
+                [
+                    "TEST_DAG_RUN_ID1",
+                    "TEST_DAG_RUN_ID2",
+                    "TEST_DAG_RUN_ID3",
+                    "TEST_DAG_RUN_ID4",
+                    "TEST_DAG_RUN_ID5",
+                    "TEST_DAG_RUN_ID6",
+                    "TEST_DAG_RUN_ID7",
+                    "TEST_DAG_RUN_ID8",
+                    "TEST_DAG_RUN_ID9",
+                    "TEST_DAG_RUN_ID10",
+                ],
+            ),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=5", ["TEST_DAG_RUN_ID6"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=1&offset=1", ["TEST_DAG_RUN_ID2"]),
+            ("api/v1/dags/TEST_DAG_ID/dagRuns?limit=2&offset=2", ["TEST_DAG_RUN_ID3", "TEST_DAG_RUN_ID4"],),
+        ]
+    )
+    @provide_session
+    def test_handle_limit_and_offset(self, url, expected_dag_run_ids, session):
+        dagrun_models = self._create_dag_runs(10)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get(url)
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 10)
+        dag_run_ids = [dag_run["dag_run_id"] for dag_run in response.json["dag_runs"]]
+        self.assertEqual(dag_run_ids, expected_dag_run_ids)
+
+    @provide_session
+    def test_should_respect_page_size_limit(self, session):
+        dagrun_models = self._create_dag_runs(200)
+        session.add_all(dagrun_models)
+        session.commit()
+
+        response = self.client.get("api/v1/dags/TEST_DAG_ID/dagRuns?limit=150")
+        assert response.status_code == 200
+
+        self.assertEqual(response.json["total_entries"], 200)
+        self.assertEqual(len(response.json["dag_runs"]), 100)
+
+    def _create_dag_runs(self, count):
+        return [
+            DagRun(
+                dag_id="TEST_DAG_ID",
+                run_id="TEST_DAG_RUN_ID" + str(i),
+                run_type=DagRunType.MANUAL.value,
+                execution_date=timezone.parse(self.now) + timedelta(minutes=i),
+                start_date=timezone.parse(self.now),
+                external_trigger=True,
+            )
+            for i in range(1, count + 1)
+        ]
+
+
+class TestGetDagRunsPaginationFilters(TestDagRunEndpoint):
+    @parameterized.expand(
+        [
+            (
+                "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte=2020-06-18T18:00:00+00:00",
+                ["TEST_DAG_RUN_ID8", "TEST_DAG_RUN_ID9"],

Review comment:
       What do you think about also asserting the start/execution dates here? The run identifiers alone describe these assertions poorly.
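       For example, each expected run id could be paired with its expected start date. This is only a rough sketch that assumes the module's existing `parameterized`/`provide_session` imports; the date strings and the `_create_dag_runs()` helper below are illustrative placeholders, not the actual fixtures:

```python
# Rough sketch only: the date strings and the _create_dag_runs() helper are
# illustrative placeholders, not the actual fixtures in this test module.
@parameterized.expand(
    [
        (
            "api/v1/dags/TEST_DAG_ID/dagRuns?start_date_gte=2020-06-18T18:00:00+00:00",
            [
                ("TEST_DAG_RUN_ID8", "2020-06-18T18:00:00+00:00"),
                ("TEST_DAG_RUN_ID9", "2020-06-19T18:00:00+00:00"),
            ],
        ),
    ]
)
@provide_session
def test_date_filters(self, url, expected_runs, session):
    # Seed dag runs whose start/execution dates are known up front.
    session.add_all(self._create_dag_runs())
    session.commit()

    response = self.client.get(url)
    assert response.status_code == 200

    # Asserting (run_id, start_date) pairs documents which dates the filter matched.
    actual_runs = [(r["dag_run_id"], r["start_date"]) for r in response.json["dag_runs"]]
    self.assertEqual(actual_runs, expected_runs)
```

       That way a failing case shows the boundary dates directly instead of requiring the reader to look up what date TEST_DAG_RUN_ID8 was created with.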



