You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airflow.apache.org by ka...@apache.org on 2020/08/11 22:34:53 UTC

[airflow] 12/32: Constraint files are now maintained automatically (#9889)

This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 5f93baf3f8a785b93b6ee9811d3938d8200c55ad
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Mon Jul 20 14:36:03 2020 +0200

    Constraint files are now maintained automatically (#9889)
    
    * Constraint files are now maintained automatically
    
    * No need to generate requirements when setup.py changes
    * requirements are kept in separate orphan branches not in main repo
    * merges to master verify if latest requirements are working and
      push tested requirements to orphaned branches
    * we keep history of requirement changes and can label them
      individually for each version (by constraint-1.10.n tag name)
    * consistently changed all references to be 'constraints' not
      'requirements'
    
    (cherry picked from commit de9eaeb434747897a192ef31815fbdd519e29c4d)
---
 .dockerignore                                      |   1 -
 .github/workflows/ci.yml                           | 147 +++++++++++++--------
 BREEZE.rst                                         |  64 +++++----
 CI.rst                                             | 113 +++++++++-------
 CONTRIBUTING.rst                                   |  71 ++++------
 Dockerfile                                         |  17 +--
 Dockerfile.ci                                      |  21 +--
 IMAGES.rst                                         |  75 +++++------
 INSTALL                                            |   7 +-
 LOCAL_VIRTUALENV.rst                               |  13 +-
 README.md                                          |  17 +--
 breeze                                             |  41 +++---
 breeze-complete                                    |   2 +-
 common/_default_branch.sh                          |   1 +
 docs/installation.rst                              |  31 +++--
 requirements/REMOVE.md                             |  22 +++
 .../ci_generate_constraints.sh}                    |   2 +-
 scripts/ci/docker-compose/local.yml                |   1 -
 .../ci/in_container/run_generate_constraints.sh    |  50 +++++++
 .../ci/in_container/run_generate_requirements.sh   |  80 -----------
 scripts/ci/kubernetes/ci_run_kubernetes_tests.sh   |   5 +-
 scripts/ci/libraries/_build_images.sh              |  34 ++---
 scripts/ci/libraries/_initialization.sh            |  16 +--
 scripts/ci/libraries/_local_mounts.sh              |   1 -
 scripts/ci/libraries/_runs.sh                      |   8 +-
 .../pre_commit/pre_commit_generate_requirements.sh |  24 ----
 scripts/ci/static_checks/ci_run_static_checks.sh   |   3 +
 .../ci/tools/ci_check_if_tests_should_be_run.sh    |   1 -
 28 files changed, 444 insertions(+), 424 deletions(-)

diff --git a/.dockerignore b/.dockerignore
index 6f89516..d7d621d 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -46,7 +46,6 @@
 !MANIFEST.in
 !NOTICE
 !.github
-!requirements
 !empty
 
 # Avoid triggering context change on README change (new companies using Airflow)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 604fa0d..aac8be1 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -31,7 +31,7 @@ env:
   SKIP_CI_IMAGE_CHECK: "true"
   DB_RESET: "true"
   VERBOSE: "true"
-  UPGRADE_TO_LATEST_REQUIREMENTS: "false"
+  UPGRADE_TO_LATEST_CONSTRAINTS: ${{ github.event_name == 'push' || github.event_name == 'scheduled' }}
   PYTHON_MAJOR_MINOR_VERSION: 3.6
   USE_GITHUB_REGISTRY: "true"
   CACHE_IMAGE_PREFIX: ${{ github.repository }}
@@ -66,7 +66,6 @@ jobs:
       - cancel-previous-workflow-run
     env:
       MOUNT_SOURCE_DIR_FOR_STATIC_CHECKS: "true"
-      CI_JOB_TYPE: "Static checks"
     steps:
       - uses: actions/checkout@master
       - uses: actions/setup-python@v1
@@ -84,19 +83,13 @@ jobs:
       - name: "Build CI image"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Static checks"
-        if: success()
-        run: |
-          python -m pip install pre-commit \
-              --constraint requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt
-          ./scripts/ci/static_checks/ci_run_static_checks.sh
+        run: ./scripts/ci/static_checks/ci_run_static_checks.sh
   docs:
     timeout-minutes: 60
     name: "Build docs"
     runs-on: ubuntu-latest
     needs:
       - cancel-previous-workflow-run
-    env:
-      CI_JOB_TYPE: "Documentation"
     steps:
       - uses: actions/checkout@master
       - uses: actions/setup-python@v1
@@ -142,7 +135,6 @@ jobs:
       BACKEND: postgres
       TEST_TYPE: ${{ matrix.test-type }}
       RUN_TESTS: "true"
-      CI_JOB_TYPE: "Tests"
       SKIP_CI_IMAGE_CHECK: "true"
       RUNTIME: "kubernetes"
       ENABLE_KIND_CLUSTER: "true"
@@ -173,8 +165,7 @@ jobs:
           cache-name: cache-kubernetes-tests-virtualenv-v4
         with:
           path: .build/.kubernetes_venv
-          key: "${{ env.cache-name }}-${{ github.job }}-\
-${{ hashFiles('requirements/requirements-python${{matrix.python-version}}.txt') }}"
+          key: "${{ env.cache-name }}-${{ github.job }}-v1"
       - name: "Tests"
         run: ./scripts/ci/kubernetes/ci_run_kubernetes_tests.sh
       - uses: actions/upload-artifact@v2
@@ -201,7 +192,6 @@ ${{ hashFiles('requirements/requirements-python${{matrix.python-version}}.txt')
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       POSTGRES_VERSION: ${{ matrix.postgres-version }}
       RUN_TESTS: "true"
-      CI_JOB_TYPE: "Tests"
       TEST_TYPE: ${{ matrix.test-type }}
     if: needs.trigger-tests.outputs.run-tests == 'true' || github.event_name != 'pull_request'
     steps:
@@ -232,7 +222,6 @@ ${{ hashFiles('requirements/requirements-python${{matrix.python-version}}.txt')
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       MYSQL_VERSION: ${{ matrix.mysql-version }}
       RUN_TESTS: "true"
-      CI_JOB_TYPE: "Tests"
       TEST_TYPE: ${{ matrix.test-type }}
     if: needs.trigger-tests.outputs.run-tests == 'true' || github.event_name != 'pull_request'
     steps:
@@ -262,7 +251,6 @@ ${{ hashFiles('requirements/requirements-python${{matrix.python-version}}.txt')
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       TEST_TYPE: ${{ matrix.test-type }}
       RUN_TESTS: "true"
-      CI_JOB_TYPE: "Tests"
     if: needs.trigger-tests.outputs.run-tests == 'true' || github.event_name != 'pull_request'
     steps:
       - uses: actions/checkout@master
@@ -293,7 +281,6 @@ ${{ hashFiles('requirements/requirements-python${{matrix.python-version}}.txt')
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       POSTGRES_VERSION: ${{ matrix.postgres-version }}
       RUN_TESTS: "true"
-      CI_JOB_TYPE: "Tests"
       TEST_TYPE: ${{ matrix.test-type }}
     if: needs.trigger-tests.outputs.run-tests == 'true' || github.event_name != 'pull_request'
     steps:
@@ -314,38 +301,11 @@ ${{ hashFiles('requirements/requirements-python${{matrix.python-version}}.txt')
     runs-on: ubuntu-latest
     needs:
       - cancel-previous-workflow-run
-    env:
-      CI_JOB_TYPE: "Tests"
     steps:
       - uses: actions/checkout@master
       - name: "Helm Tests"
         run: ./scripts/ci/kubernetes/ci_run_helm_testing.sh
 
-  requirements:
-    timeout-minutes: 80
-    name: "Requirements"
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: [2.7, 3.5, 3.6, 3.7, 3.8]
-      fail-fast: false
-    needs:
-      - cancel-previous-workflow-run
-    env:
-      PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
-      CHECK_REQUIREMENTS_ONLY: true
-      UPGRADE_WHILE_GENERATING_REQUIREMENTS: ${{ github.event_name == 'schedule' }}
-      CI_JOB_TYPE: "Requirements"
-    steps:
-      - uses: actions/checkout@master
-      - uses: actions/setup-python@v1
-      - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
-      - name: "Build CI image ${{ matrix.python-version }}"
-        run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
-      - name: "Generate requirements"
-        run: ./scripts/ci/requirements/ci_generate_requirements.sh
-
   build-prod-image:
     timeout-minutes: 60
     name: "Build prod image Py${{ matrix.python-version }}"
@@ -355,7 +315,6 @@ ${{ hashFiles('requirements/requirements-python${{matrix.python-version}}.txt')
         python-version: [2.7, 3.5, 3.6, 3.7, 3.8]
     env:
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
-      CI_JOB_TYPE: "Prod image"
     steps:
       - uses: actions/checkout@master
       - name: "Build PROD image ${{ matrix.python-version }}"
@@ -369,16 +328,16 @@ ${{ hashFiles('requirements/requirements-python${{matrix.python-version}}.txt')
       - tests-sqlite
       - tests-postgres
       - tests-mysql
-      - requirements
       - build-prod-image
       - docs
-    if: github.ref == 'refs/heads/master' && github.event_name != 'schedule'
+    if: |
+      (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test') &&
+      github.event_name != 'schedule'
     strategy:
       matrix:
         python-version: [2.7, 3.5, 3.6, 3.7, 3.8]
     env:
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
-      CI_JOB_TYPE: "Prod image"
     steps:
       - uses: actions/checkout@master
       - name: "Free space"
@@ -396,12 +355,9 @@ ${{ hashFiles('requirements/requirements-python${{matrix.python-version}}.txt')
       - tests-sqlite
       - tests-postgres
       - tests-mysql
-      - requirements
-      - build-prod-image
       - docs
     if: |
-      (github.ref == 'refs/heads/master' ||
-      github.ref == 'refs/heads/v1-10-test' ) &&
+      (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test' ) &&
       github.event_name != 'schedule'
     strategy:
       matrix:
@@ -409,7 +365,6 @@ ${{ hashFiles('requirements/requirements-python${{matrix.python-version}}.txt')
     env:
       PULL_PYTHON_BASE_IMAGES_FROM_CACHE: "false"
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
-      CI_JOB_TYPE: "Prod image"
     steps:
       - uses: actions/checkout@master
       - name: "Free space"
@@ -418,3 +373,91 @@ ${{ hashFiles('requirements/requirements-python${{matrix.python-version}}.txt')
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Push CI image ${{ matrix.python-version }}"
         run: ./scripts/ci/images/ci_push_ci_image.sh
+
+  constraints:
+    timeout-minutes: 80
+    name: "Constraints"
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: [2.7, 3.5, 3.6, 3.7, 3.8]
+      fail-fast: false
+    needs:
+      - cancel-previous-workflow-run
+      - tests-sqlite
+      - tests-mysql
+      - tests-postgres
+      - tests-kubernetes
+    env:
+      PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
+    if: |
+      github.event_name == 'push' &&
+      (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test')
+    steps:
+      - uses: actions/checkout@master
+      - uses: actions/setup-python@v1
+      - name: "Free space"
+        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+      - name: "Build CI image ${{ matrix.python-version }}"
+        run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
+      - name: "Generate constraints"
+        run: ./scripts/ci/constraints/ci_generate_constraints.sh
+      - uses: actions/upload-artifact@v2
+        name: Upload constraint artifacts
+        with:
+          name: 'constraints-${{matrix.python-version}}'
+          path: 'files/constraints-${{matrix.python-version}}/constraints-${{matrix.python-version}}.txt'
+
+  constraints-push:
+    timeout-minutes: 10
+    name: "Constraints push"
+    runs-on: ubuntu-latest
+    needs:
+      - constraints
+    if: |
+      github.event_name == 'push' &&
+      (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test')
+    steps:
+      - name: "Set constraints branch name"
+        id: constraints-branch
+        run: |
+          if [[ ${GITHUB_REF} == 'refs/heads/master' ]]; then
+              echo "::set-output name=branch::constraints-master"
+          elif [[ ${GITHUB_REF} == 'refs/heads/v1-10-test' ]]; then
+              echo "::set-output name=branch::constraints-1-10"
+          else
+              echo
+              echo "Unexpected ref ${GITHUB_REF}. Exiting!"
+              echo
+              exit 1
+          fi
+      - uses: actions/checkout@v2
+        with:
+          path: "repo"
+          ref: ${{ steps.constraints-branch.outputs.branch }}
+      - uses: actions/download-artifact@v2
+        with:
+          path: 'artifacts'
+        name: "Get all artifacts (constraints)"
+      - name: "Commit changed constraint files"
+        run: |
+          cp -v ./artifacts/constraints-*/constraints*.txt repo/
+          cd repo
+          git config --local user.email "dev@airflow.apache.org"
+          git config --local user.name "Automated Github Actions commit"
+          git diff --exit-code || git commit --all --message "Updating constraints. GH run id:${GITHUB_RUN_ID}
+
+          This update in constraints is automatically committed by CI pushing
+          reference '${GITHUB_REF}' to ${GITHUB_REPOSITORY} with commit sha
+          ${GITHUB_SHA}.
+
+          All tests passed in this build so we determined we can push the updated constraints.
+
+          See https://github.com/apache/airflow/blob/master/README.md#installing-from-pypi for details.
+          "
+      - name: Push changes
+        uses: ad-m/github-push-action@master
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          branch: ${{ steps.constraints-branch.outputs.branch }}
+          directory: "repo"
diff --git a/BREEZE.rst b/BREEZE.rst
index c377ec0..5976b0f 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -328,7 +328,7 @@ Managing CI environment:
     * Stop running interactive environment with ``breeze stop`` command
     * Restart running interactive environment with ``breeze restart`` command
     * Run test specified with ``breeze tests`` command
-    * Generate requirements with ``breeze generate-requirements`` command
+    * Generate constraints with ``breeze generate-constraints`` command
     * Execute arbitrary command in the test environment with ``breeze shell`` command
     * Execute arbitrary docker-compose command with ``breeze docker-compose`` command
     * Push docker images with ``breeze push-image`` command (require committer's rights to push images)
@@ -693,40 +693,34 @@ easily identify the location the problems with documentation originated from.
       </a>
     </div>
 
-Generating requirements
------------------------
+Generating constraints
+----------------------
 
-Whenever you modify and commit setup.py, you need to re-generate requirement files. Those requirement
-files ara stored separately for each python version in the ``requirements`` folder. Those are
-constraints rather than requirements as described in detail in the
-`CONTRIBUTING <CONTRIBUTING.rst#pinned-requirement-files>`_ contributing documentation.
+Whenever setup.py gets modified, the CI master job will re-generate constraint files. Those constraint
+files are stored in separate orphan branches: ``constraints-master`` and ``constraints-1-10``.
+They are stored separately for each python version. Those are
+constraint files as described in detail in the
+`CONTRIBUTING <CONTRIBUTING.rst#pinned-constraint-files>`_ contributing documentation.
 
-In case you modify setup.py you need to update the requirements - for every python version supported.
+In case someone modifies setup.py, the ``CRON`` scheduled CI build automatically upgrades and
+pushes changes to the constraint files, however you can also perform test run of this locally using
+``generate-constraints`` command of Breeze.
 
 .. code-block:: bash
 
-  ./breeze generate-requirements --python 3.6
+  ./breeze generate-constraints --python 3.6
 
 .. code-block:: bash
 
-  ./breeze generate-requirements --python 3.7
+  ./breeze generate-constraints --python 3.7
 
 .. code-block:: bash
 
-  ./breeze generate-requirements --python 3.8
+  ./breeze generate-constraints --python 3.8
 
-
-This bumps requirements to latest versions and stores hash of setup.py so that we are automatically
-upgrading the requirements as we add new ones.
-
-.. raw:: html
-
-    <div align="center">
-      <a href="https://youtu.be/4MCTXq-oF68?t=1823">
-        <img src="images/breeze/overlayed_breeze_generate_requirements.png" width="640"
-             alt="Airflow Breeze - Generate requirements">
-      </a>
-    </div>
+This bumps the constraint files to latest versions and stores hash of setup.py. The generated constraint
+and setup.py hash files are stored in the ``files`` folder and while generating the constraints diff
+of changes vs the previous constraint files is printed.
 
 Using local virtualenv environment in Your Host IDE
 ---------------------------------------------------
@@ -752,7 +746,7 @@ To use your host IDE with Breeze:
 
 .. code-block:: bash
 
-  ./breeze generate-requirements --python 3.8
+  ./breeze initialize-local-virtualenv --python 3.8
 
 4. Select the virtualenv you created as the project's default virtualenv in your IDE.
 
@@ -1050,7 +1044,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
     build-image                              Builds CI or Production docker image
     cleanup-image                            Cleans up the container image created
     exec                                     Execs into running breeze container in new terminal
-    generate-requirements                    Generates pinned requirements for pip dependencies
+    generate-constraints                     Generates pinned constraint files
     push-image                               Pushes images to registry
     initialize-local-virtualenv              Initializes local virtualenv
     setup-autocomplete                       Sets up autocomplete for breeze
@@ -1295,16 +1289,18 @@ This is the current syntax for  `./breeze <./breeze>`_:
   ####################################################################################################
 
 
-  Detailed usage for command: generate-requirements
+  Detailed usage for command: generate-constraints
 
 
-  breeze generate-requirements [FLAGS]
+  breeze generate-constraints [FLAGS]
 
-        Generates pinned requirements from setup.py. Those requirements are generated in requirements
-        directory - separately for different python version. Those requirements are used to run
-        CI builds as well as run repeatable production image builds. You can use those requirements
-        to predictably install released Airflow versions. You should run it always after you update
-        setup.py.
+        Generates pinned constraint files from setup.py. Those files are generated in files folder
+        - separate files for different python version. Those constraint files when pushed to orphan
+        constraints-master and constraints-1-10 branches are used to generate repeatable
+        CI builds as well as run repeatable production image builds. You can use those constraints
+        to predictably install released Airflow versions. This is mainly used to test the constraint
+        generation - constraints are pushed to the orphan branches by a successful scheduled
+        CRON job in CI automatically.
 
   Flags:
 
@@ -1380,8 +1376,8 @@ This is the current syntax for  `./breeze <./breeze>`_:
   breeze initialize-local-virtualenv [FLAGS]
 
         Initializes locally created virtualenv installing all dependencies of Airflow
-        taking into account the frozen requirements from requirements folder.
-        This local virtualenv can be used to aid autocompletion and IDE support as
+        taking into account the constraints for the version specified.
+        This local virtualenv can be used to aid auto-completion and IDE support as
         well as run unit tests directly from the IDE. You need to have virtualenv
         activated before running this command.
 
diff --git a/CI.rst b/CI.rst
index 210cf90..8966073 100644
--- a/CI.rst
+++ b/CI.rst
@@ -70,41 +70,58 @@ CI run types
 The following CI Job runs are currently run for Apache Airflow, and each of the runs have different
 purpose and context.
 
-* **Pull Request Run** - Those runs are results of PR from the forks made by contributors. Most builds
-  for Apache Airflow fall into this category. They are executed in the context of the "Fork", not main
-  Airflow Code Repository which means that they have only "read" permission to all the GitHub resources
-  (container registry, code repository). This is necessary as the code in those PRs (including CI job
-  definition) might be modified by people who are not committers for the Apache Airflow Code Repository.
-  The main purpose of those jobs is to check if PR builds cleanly, if the test run properly and if
-  the PR is ready to review and merge. The runs are using cached images from the Private GitHub registry -
-  CI, Production Images as well as base Python images that are also cached in the Private GitHub registry.
-
-* **Direct Push/Merge Run** - Those runs are results of direct pushes done by the committers or as result
-  of merge of a Pull Request by the committers. Those runs execute in the context of the Apache Airflow
-  Code Repository and have also write permission for GitHub resources (container registry, code repository).
-  The main purpose for the run is to check if the code after merge still holds all the assertions - like
-  whether it still builds, all tests are green. This is needed because some of the conflicting changes from
-  multiple PRs might cause build and test failures after merge even if they do not fail in isolation. Also
-  those runs are already reviewed and confirmed by the committers so they can be used to do some housekeeping
-  - for now they are pushing most recent image build in the PR to the Github Private Registry - which is our
-  image cache for all the builds. Another purpose of those runs is to refresh latest Python base images.
-  Python base images are refreshed with varying frequency (once every few months usually but sometimes
-  several times per week) with the latest security and bug fixes. Those patch level images releases can
-  occasionally break Airflow builds (specifically Docker image builds based on those images) therefore
-  in PRs we always use latest "good" python image that we store in the private GitHub cache. The direct
-  push/master builds are not using registry cache to pull the python images - they are directly
-  pulling the images from DockerHub, therefore they will try the latest images after they are released
-  and in case they are fine, CI Docker image is build and tests are passing - those jobs will push the base
-  images to the private GitHub Registry so that they be used by subsequent PR runs.
-
-* **Scheduled Run** - those runs are results of (nightly) triggered jobs - only for well-defined branches:
-  ``master`` and ``v1-10-test`` they execute nightly. Their main purpose is to check if there was no impact
-  of external dependency changes on the Apache Airflow code (for example transitive dependencies released
-  that fail the build). They also check if the Docker images can be build from the scratch (again - to see
-  if some dependencies have not changed - for example downloaded package releases etc. Another reason for
-  the nightly build is that the builds tags most recent master or v1-10-test code with "master-nightly" and
-  "v1-10-test" tags respectively so that DockerHub build can pick up the moved tag and prepare a nightly
-  "public" build in the DockerHub.
+Pull request run
+----------------
+
+Those runs are results of PR from the forks made by contributors. Most builds for Apache Airflow fall
+into this category. They are executed in the context of the "Fork", not main
+Airflow Code Repository which means that they have only "read" permission to all the GitHub resources
+(container registry, code repository). This is necessary as the code in those PRs (including CI job
+definition) might be modified by people who are not committers for the Apache Airflow Code Repository.
+
+The main purpose of those jobs is to check if PR builds cleanly, if the tests run properly and if
+the PR is ready to review and merge. The runs are using cached images from the Private GitHub registry -
+CI, Production Images as well as base Python images that are also cached in the Private GitHub registry.
+Also for those builds we only execute Python tests if important files changed (so for example if it is
+doc-only change, no tests will be executed).
+
+Direct Push/Merge Run
+---------------------
+
+Those runs are results of direct pushes done by the committers or as result of merge of a Pull Request
+by the committers. Those runs execute in the context of the Apache Airflow Code Repository and have also
+write permission for GitHub resources (container registry, code repository).
+The main purpose for the run is to check if the code after merge still holds all the assertions - like
+whether it still builds, all tests are green.
+
+This is needed because some of the conflicting changes from multiple PRs might cause build and test failures
+after merge even if they do not fail in isolation. Also those runs are already reviewed and confirmed by the
+committers so they can be used to do some housekeeping:
+- pushing most recent image build in the PR to the Github Private Registry (for caching)
+- upgrading to latest constraints and pushing those constraints if all tests succeed
+- refresh latest Python base images in case new patch-level is released
+
+The housekeeping is important - Python base images are refreshed with varying frequency (once every few months
+usually but sometimes several times per week) with the latest security and bug fixes.
+Those patch level images releases can occasionally break Airflow builds (specifically Docker image builds
+based on those images) therefore in PRs we only use latest "good" python image that we store in the
+private GitHub cache. The direct push/master builds are not using registry cache to pull the python images
+- they are directly pulling the images from DockerHub, therefore they will try the latest images
+after they are released and in case they are fine, CI Docker image is built and tests are passing -
+those jobs will push the base images to the private GitHub Registry so that they be used by subsequent
+PR runs.
+
+Scheduled runs
+--------------
+
+Those runs are results of (nightly) triggered job - only for ``master`` branch. The
+main purpose of the job is to check if there was no impact of external dependency changes on the Apache
+Airflow code (for example transitive dependencies released that fail the build). It also checks if the
+Docker images can be built from scratch (again - to see if some dependencies have not changed - for
+example downloaded package releases etc.). Another reason for the nightly build is that the build tags most
+recent master with ``nightly-master`` tag so that DockerHub build can pick up the moved tag and prepare a
+nightly public master build in the DockerHub registry. The ``v1-10-test`` branch images are build in
+DockerHub when pushing ``v1-10-stable`` manually.
 
 All runs consist of the same jobs, but the jobs behave slightly differently or they are skipped in different
 run categories. Here is a summary of the run categories with regards of the jobs they are running.
@@ -115,31 +132,35 @@ Those jobs often have matrix run strategy which runs several different variation
 | Job                       | Description                                                                                                    | Pull Request Run                   | Direct Push/Merge Run           | Scheduled Run                                                        |
 |                           |                                                                                                                |                                    |                                 |   (*) Builds all images from scratch                                 |
 +===========================+================================================================================================================+====================================+=================================+======================================================================+
-| Static checks 1           | Performs first set of static checks                                                                            | Yes                                | Yes                             | Yes *                                                                |
+| Cancel previous workflow  | Cancels the previously running workflow run if there is one running                                            | Yes                                | Yes                             | Yes *                                                                |
 +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
-| Static checks 2           | Performs second set of static checks                                                                           | Yes                                | Yes                             | Yes *                                                                |
+| Static checks             | Performs static checks                                                                                         | Yes                                | Yes                             | Yes *                                                                |
 +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
 | Docs                      | Builds documentation                                                                                           | Yes                                | Yes                             | Yes *                                                                |
 +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
+| Prepare Backport packages | Prepares Backport Packages for 1.10.*                                                                          | Yes                                | Yes                             | Yes *                                                                |
++---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
+| Trigger tests             | Checks if tests should be triggered                                                                            | Yes                                | Yes                             | Yes *                                                                |
++---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
 | Build Prod Image          | Builds production image                                                                                        | Yes                                | Yes                             | Yes *                                                                |
 +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
-| Prepare Backport packages | Prepares Backport Packages for 1.10.*                                                                          | Yes                                | Yes                             | Yes *                                                                |
+| Tests                     | Run all the combinations of Pytest tests for Python code                                                       | Yes (if tests-triggered)           | Yes                             | Yes *                                                                |
 +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
-| Pyfiles                   | Counts how many python files changed in the  change.Used to determine if tests should be run                   | Yes                                | Yes (but it is not used)        | Yes (but it is not used)                                             |
+| Tests Kubernetes          | Run Kubernetes test                                                                                            | Yes (if tests-triggered)           | Yes                             | Yes *                                                                |
 +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
-| Tests                     | Run all the combinations of Pytest tests for Python code                                                       | Yes (if pyfiles count >0)          | Yes                             | Yes*                                                                 |
+| Quarantined tests         | Those are tests that are flaky and we need to fix them                                                         | Yes (if tests-triggered)           | Yes                             | Yes *                                                                |
 +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
-| Quarantined tests         | Those are tests that are flaky and we need to fix them                                                         | Yes (if pyfiles count >0)          | Yes                             | Yes *                                                                |
+| Test OpenAPI client gen   | Tests if OpenAPIClient continues to generate                                                                   | Yes                                | Yes                             | Yes *                                                                |
 +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
-| Requirements              | Checks if requirement constraints in the code are up-to-date                                                   | Yes (fails if missing requirement) | Yes (fails missing requirement) | Yes (Eager dependency upgrade - does not fail changed requirements)  |
+| Helm tests                | Runs tests for the Helm chart                                                                                  | Yes                                | Yes                             | Yes *                                                                |
 +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
-| Pull python from cache    | Pulls Python base images from Github Private Image registry to keep the last good python image used in PRs     | Yes                                | No                              | -                                                                    |
+| Constraints               | Upgrade constraints to latest eagerly pushed ones (only if tests successful)                                   | -                                  | Yes                             | Yes *                                                                |
 +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
-| Push python to cache      | Pushes Python base images to Github Private Image registry - checks if latest image is fine and pushes if so   | No                                 | Yes                             | -                                                                    |
+| Constraints push          | Pushes updated constraints (only if tests successful)                                                          | -                                  | Yes                             | -                                                                    |
 +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
-| Push Prod image           | Pushes production images to GitHub Private Image Registry to cache the build images for following runs         | -                                  | Yes                             | -                                                                    |
+| Push Prod images          | Pushes production images to GitHub Private Image Registry to cache the build images for following runs         | -                                  | Yes                             | -                                                                    |
 +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
-| Push CI image             | Pushes CI images to GitHub Private Image Registry to cache the build images for following runs                 | -                                  | Yes                             | -                                                                    |
+| Push CI images            | Pushes CI images to GitHub Private Image Registry to cache the build images for following runs                 | -                                  | Yes                             | -                                                                    |
 +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
 | Tag Repo nightly          | Tags the repository with nightly tagIt is a lightweight tag that moves nightly                                 | -                                  | -                               | Yes. Triggers DockerHub build for public registry                    |
 +---------------------------+----------------------------------------------------------------------------------------------------------------+------------------------------------+---------------------------------+----------------------------------------------------------------------+
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index e77a526..bbe5d8d 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -333,7 +333,7 @@ Airflow dependencies
 Airflow is not a standard python project. Most of the python projects fall into one of two types -
 application or library. As described in
 [StackOverflow Question](https://stackoverflow.com/questions/28509481/should-i-pin-my-python-dependencies-versions)
-decision whether to pin (freeze) requirements for a python project depdends on the type. For
+decision whether to pin (freeze) dependency versions for a python project depends on the type. For
 applications, dependencies should be pinned, but for libraries, they should be open.
 
 For application, pinning the dependencies makes it more stable to install in the future - because new
@@ -343,76 +343,61 @@ be open to allow several different libraries with the same requirements to be in
 The problem is that Apache Airflow is a bit of both - application to install and library to be used when
 you are developing your own operators and DAGs.
 
-This - seemingly unsolvable - puzzle is solved by having pinned requirement files. Those are available
-as of airflow 1.10.10.
+This - seemingly unsolvable - puzzle is solved by having pinned constraint files. Those are available
+as of airflow 1.10.10 and further improved with 1.10.12 (moved to separate orphan branches).
 
-Pinned requirement files
-------------------------
+Pinned constraint files
+-----------------------
 
 By default when you install ``apache-airflow`` package - the dependencies are as open as possible while
-still allowing the apache-airflow package to install. This means that 'apache-airflow' package might fail to
+still allowing the apache-airflow package to install. This means that ``apache-airflow`` package might fail to
 install in case a direct or transitive dependency is released that breaks the installation. In such case
 when installing ``apache-airflow``, you might need to provide additional constraints (for
 example ``pip install apache-airflow==1.10.2 Werkzeug<1.0.0``)
 
-However we now have ``requirements-python<PYTHON_MAJOR_MINOR_VERSION>.txt`` file generated
-automatically and committed in the requirements folder based on the set of all latest working and tested
-requirement versions. Those ``requirement-python<PYTHON_MAJOR_MINOR_VERSION>.txt`` files can be used as
-constraints file when installing Apache Airflow - either from the sources
+However we now have ``constraints-<PYTHON_MAJOR_MINOR_VERSION>.txt`` files generated
+automatically and committed to orphan ``constraints-master`` and ``constraints-1-10`` branches based on
+the set of all latest working and tested dependency versions. Those
+``constraints-<PYTHON_MAJOR_MINOR_VERSION>.txt`` files can be used as
+constraints file when installing Apache Airflow - either from the sources:
 
 .. code-block:: bash
 
-  pip install -e . --constraint requirements/requirements-python3.6.txt
+  pip install -e . \
+    --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-1-10/constraints-3.6.txt"
 
 
-or from the pypi package
+or from the pypi package:
 
 .. code-block:: bash
 
-  pip install apache-airflow --constraint requirements/requirements-python3.6.txt
+  pip install apache-airflow \
+    --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-1-10/constraints-3.6.txt"
 
 
 This works also with extras - for example:
 
 .. code-block:: bash
 
-  pip install .[gcp] --constraint requirements/requirements-python3.6.txt
+  pip install .[ssh] \
+    --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt"
 
 
-It is also possible to use constraints directly from github using tag/version name:
+As of apache-airflow 1.10.12 it is also possible to use constraints directly from github using a specific
+tag/hash name. We tag commits working for a particular release with a constraints-<version> tag. So for
+example the fixed, valid constraints for 1.10.12 can be used by using the ``constraints-1.10.12`` tag:
 
 .. code-block:: bash
 
-  pip install apache-airflow[gcp]==1.10.10 \
-      --constraint https://raw.githubusercontent.com/apache/airflow/1.10.10/requirements/requirements-python3.6.txt
+  pip install apache-airflow[ssh]==1.10.12 \
+      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-1.10.12/constraints-3.6.txt"
 
-There are different set of fixed requirements for different python major/minor versions and you should
-use the right requirements file for the right python version.
+There are different sets of fixed constraint files for different python major/minor versions and you should
+use the right file for the right python version.
 
-The ``requirements-python<PYTHON_MAJOR_MINOR_VERSION>.txt`` file MUST be regenerated every time after
-the ``setup.py`` is updated. This is checked automatically in the CI build. There are separate
-jobs for each python version that checks if the requirements should be updated.
-
-If they are not updated, you should regenerate the requirements locally using Breeze as described below.
-
-Generating requirement files
-----------------------------
-
-This should be done every time after you modify setup.py file. You can generate requirement files
-using `Breeze <BREEZE.rst>`_ . Simply use those commands:
-
-.. code-block:: bash
-
-  breeze generate-requirements --python 3.7
-
-.. code-block:: bash
-
-  breeze generate-requirements --python 3.6
-
-Note that when you generate requirements this way, you might update to latest version of requirements
-that were released since the last time so during tests you might get errors unrelated to your change.
-In this case the easiest way to fix it is to limit the culprit dependency to the previous version
-with ``<NNNN.NN>`` constraint added in setup.py.
+The ``constraints-<PYTHON_MAJOR_MINOR_VERSION>.txt`` will be automatically regenerated by CI cron job
+every time after the ``setup.py`` is updated and pushed if the tests are successful. There are separate
+jobs for each python version.
 
 Backport providers packages
 ---------------------------
diff --git a/Dockerfile b/Dockerfile
index c06105d..98cf3dc 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -168,12 +168,15 @@ ARG AIRFLOW_EXTRAS
 ARG ADDITIONAL_AIRFLOW_EXTRAS=""
 ENV AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS}${ADDITIONAL_AIRFLOW_EXTRAS:+,}${ADDITIONAL_AIRFLOW_EXTRAS}
 
+ARG AIRFLOW_CONSTRAINTS_REFERENCE="constraints-master"
+ARG AIRFLOW_CONSTRAINTS_URL="https://raw.githubusercontent.com/apache/airflow/${AIRFLOW_CONSTRAINTS_REFERENCE}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt"
+ENV AIRFLOW_CONSTRAINTS_URL=${AIRFLOW_CONSTRAINTS_URL}
+
 # In case of Production build image segment we want to pre-install master version of airflow
 # dependencies from github so that we do not have to always reinstall it from the scratch.
 RUN pip install --user \
     "https://github.com/${AIRFLOW_REPO}/archive/${AIRFLOW_BRANCH}.tar.gz#egg=apache-airflow[${AIRFLOW_EXTRAS}]" \
-        --constraint "https://raw.githubusercontent.com/${AIRFLOW_REPO}/${AIRFLOW_BRANCH}/requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt" \
-    && pip uninstall --yes apache-airflow;
+        --constraint "${AIRFLOW_CONSTRAINTS_URL}" && pip uninstall --yes apache-airflow;
 
 ARG AIRFLOW_SOURCES_FROM="."
 ENV AIRFLOW_SOURCES_FROM=${AIRFLOW_SOURCES_FROM}
@@ -198,20 +201,14 @@ ENV AIRFLOW_INSTALL_SOURCES=${AIRFLOW_INSTALL_SOURCES}
 ARG AIRFLOW_INSTALL_VERSION=""
 ENV AIRFLOW_INSTALL_VERSION=${AIRFLOW_INSTALL_VERSION}
 
-ARG CONSTRAINT_REQUIREMENTS="requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt"
-ENV CONSTRAINT_REQUIREMENTS=${CONSTRAINT_REQUIREMENTS}
-
 WORKDIR /opt/airflow
 
-# hadolint ignore=DL3020
-ADD "${CONSTRAINT_REQUIREMENTS}" /requirements.txt
-
 ENV PATH=${PATH}:/root/.local/bin
 
 RUN pip install --user "${AIRFLOW_INSTALL_SOURCES}[${AIRFLOW_EXTRAS}]${AIRFLOW_INSTALL_VERSION}" \
-    --constraint /requirements.txt && \
+    --constraint "${AIRFLOW_CONSTRAINTS_URL}" && \
     if [ -n "${ADDITIONAL_PYTHON_DEPS}" ]; then pip install --user ${ADDITIONAL_PYTHON_DEPS} \
-    --constraint /requirements.txt; fi && \
+    --constraint "${AIRFLOW_CONSTRAINTS_URL}"; fi && \
     find /root/.local/ -name '*.pyc' -print0 | xargs -0 rm -r && \
     find /root/.local/ -type d -name '__pycache__' -print0 | xargs -0 rm -r
 
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 2b2157a..5d4f240 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -29,9 +29,6 @@ ENV AIRFLOW_VERSION=$AIRFLOW_VERSION
 ARG PYTHON_MAJOR_MINOR_VERSION="3.6"
 ENV PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION}
 
-ARG UPGRADE_TO_LATEST_REQUIREMENTS="false"
-ENV UPGRADE_TO_LATEST_REQUIREMENTS=${UPGRADE_TO_LATEST_REQUIREMENTS}
-
 # Print versions
 RUN echo "Base image: ${PYTHON_BASE_IMAGE}"
 RUN echo "Airflow version: ${AIRFLOW_VERSION}"
@@ -214,6 +211,10 @@ ENV AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS}${ADDITIONAL_AIRFLOW_EXTRAS:+,}${ADDITIONAL_
 
 RUN echo "Installing with extras: ${AIRFLOW_EXTRAS}."
 
+ARG AIRFLOW_CONSTRAINTS_REFERENCE="constraints-master"
+ARG AIRFLOW_CONSTRAINTS_URL="https://raw.githubusercontent.com/apache/airflow/${AIRFLOW_CONSTRAINTS_REFERENCE}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt"
+ENV AIRFLOW_CONSTRAINTS_URL=${AIRFLOW_CONSTRAINTS_URL}
+
 # By changing the CI build epoch we can force reinstalling Arflow from the current master
 # It can also be overwritten manually by setting the AIRFLOW_CI_BUILD_EPOCH environment variable.
 ARG AIRFLOW_CI_BUILD_EPOCH="1"
@@ -225,8 +226,7 @@ ENV AIRFLOW_CI_BUILD_EPOCH=${AIRFLOW_CI_BUILD_EPOCH}
 # And is automatically reinstalled from the scratch every time patch release of python gets released
 RUN pip install \
     "https://github.com/${AIRFLOW_REPO}/archive/${AIRFLOW_BRANCH}.tar.gz#egg=apache-airflow[${AIRFLOW_EXTRAS}]" \
-        --constraint "https://raw.githubusercontent.com/${AIRFLOW_REPO}/${AIRFLOW_BRANCH}/requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt" \
-    && pip uninstall --yes apache-airflow;
+        --constraint "${AIRFLOW_CONSTRAINTS_URL}" && pip uninstall --yes apache-airflow;
 
 
 # Link dumb-init for backwards compatibility (so that older images also work)
@@ -252,20 +252,21 @@ COPY airflow/version.py ${AIRFLOW_SOURCES}/airflow/version.py
 COPY airflow/__init__.py ${AIRFLOW_SOURCES}/airflow/__init__.py
 COPY airflow/bin/airflow ${AIRFLOW_SOURCES}/airflow/bin/airflow
 
-COPY requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt \
-        ${AIRFLOW_SOURCES}/requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt
+ARG UPGRADE_TO_LATEST_CONSTRAINTS="false"
+ENV UPGRADE_TO_LATEST_CONSTRAINTS=${UPGRADE_TO_LATEST_CONSTRAINTS}
 
 # The goal of this line is to install the dependencies from the most current setup.py from sources
 # This will be usually incremental small set of packages in CI optimized build, so it will be very fast
 # In non-CI optimized build this will install all dependencies before installing sources.
-# Usually we will install versions constrained to the current requirements file
+# Usually we will install versions constrained to the current constraints file
 # But in cron job we will install latest versions matching setup.py to see if there is no breaking change
+# and push the constraints if everything is successful
 RUN \
-    if [[ "${UPGRADE_TO_LATEST_REQUIREMENTS}" == "true" ]]; then \
+    if [[ "${UPGRADE_TO_LATEST_CONSTRAINTS}" == "true" ]]; then \
         pip install -e ".[${AIRFLOW_EXTRAS}]" --upgrade --upgrade-strategy eager; \
     else \
         pip install -e ".[${AIRFLOW_EXTRAS}]" \
-            --constraint  ${AIRFLOW_SOURCES}/requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt ; \
+            --constraint "${AIRFLOW_CONSTRAINTS_URL}" ; \
     fi
 
 # Copy all the www/ files we need to compile assets. Done as two separate COPY
diff --git a/IMAGES.rst b/IMAGES.rst
index b1890e1..6cdc3b2 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -92,25 +92,23 @@ parameter to Breeze:
 
 .. code-block:: bash
 
-  ./breeze build-image --python 3.7 --extras=gcp --production-image --install-airflow-version=1.10.9
+  ./breeze build-image --python 3.7 --extras=gcp --production-image --install-airflow-version=1.10.12
 
 This will build the image using command similar to:
 
 .. code-block:: bash
 
-    pip install apache-airflow[sendgrid]==1.10.9 \
-       --constraint https://raw.githubusercontent.com/apache/airflow/v1-10-test/requirements/requirements-python3.7.txt
-
-The requirement files only appeared in version 1.10.10 of airflow so if you install
-an earlier version -  both constraint and requirements should point to 1.10.10 version.
+    pip install apache-airflow[sendgrid]==1.10.12 \
+      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-1.10.12/constraints-3.7.txt"
 
 You can also build production images from specific Git version via providing ``--install-airflow-reference``
-parameter to Breeze:
+parameter to Breeze (this time constraints are taken from the ``constraints-master`` branch which is the
+HEAD of development for constraints):
 
 .. code-block:: bash
 
-    pip install https://github.com/apache/airflow/archive/<tag>.tar.gz#egg=apache-airflow \
-       --constraint https://raw.githubusercontent.com/apache/airflow/<tag>/requirements/requirements-python3.7.txt
+    pip install "https://github.com/apache/airflow/archive/<tag>.tar.gz#egg=apache-airflow" \
+      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.7.txt"
 
 Using cache during builds
 =========================
@@ -381,23 +379,19 @@ The following build arguments (``--build-arg`` in docker build command) can be u
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_VERSION``                      | ``2.0.0.dev0``                           | version of Airflow                       |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_ORG``                          | ``apache``                               | Github organisation from which Airflow   |
-|                                          |                                          | is installed (when installed from repo)  |
-+------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_REPO``                         | ``apache/airflow``                       | the repository from which PIP            |
 |                                          |                                          | dependencies are pre-installed           |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_BRANCH``                       | ``master``                               | the branch from which PIP dependencies   |
-|                                          |                                          | are pre-installed                        |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_GIT_REFERENCE``                | ``master``                               | reference (branch or tag) from Github    |
-|                                          |                                          | repository from which Airflow is         |
-|                                          |                                          | installed (when installed from repo)     |
+|                                          |                                          | are pre-installed initially              |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``REQUIREMENTS_GIT_REFERENCE``           | ``master``                               | reference (branch or tag) from Github    |
-|                                          |                                          | repository from which requirements are   |
-|                                          |                                          | downloaded for constraints (when         |
-|                                          |                                          | installed from repo).                    |
+| ``AIRFLOW_CONSTRAINTS_REFERENCE``        | ``constraints-master``                   | reference (branch or tag) from Github    |
+|                                          |                                          | repository from which constraints are    |
+|                                          |                                          | used. By default it is set to            |
+|                                          |                                          | ``constraints-master`` but can be        |
+|                                          |                                          | ``constraints-1-10`` for 1.10.* versions |
+|                                          |                                          | or it could point to specific version    |
+|                                          |                                          | for example ``constraints-1.10.12``      |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_EXTRAS``                       | (see Dockerfile)                         | Default extras with which airflow is     |
 |                                          |                                          | installed                                |
@@ -462,10 +456,14 @@ production image. There are three types of build:
 |                                   | set Airflow version for example   |
 |                                   | "==1.10.10"                       |
 +-----------------------------------+-----------------------------------+
-| ``CONSTRAINT_REQUIREMENTS``       | Should point to requirements file |
-|                                   | in case of installation from      |
-|                                   | the package or from GitHub URL.   |
-|                                   | See examples below                |
+| ``AIRFLOW_CONSTRAINTS_REFERENCE`` | reference (branch or tag) from    |
+|                                   | Github where constraints file     |
+|                                   | is taken from. By default it is   |
+|                                   | ``constraints-master`` but can be |
+|                                   | ``constraints-1-10`` for 1.10.*   |
+|                                   | constraint or if you want to      |
+|                                   | point to specific version         |
+|                                   | ``constraints-1.10.12``           |
 +-----------------------------------+-----------------------------------+
 | ``AIRFLOW_WWW``                   | In case of Airflow 2.0 it should  |
 |                                   | be "www", in case of Airflow 1.10 |
@@ -495,24 +493,22 @@ of 2.0 currently):
 
   docker build .
 
-This builds the production image in version 3.7 with default extras from 1.10.9 tag and
-requirements taken from v1-10-test branch in Github.
-Note that versions 1.10.9 and below have no requirements so requirements should be taken from head of
-the 1.10.10 tag.
+This builds the production image in version 3.7 with default extras from 1.10.12 tag and
+constraints taken from the constraints-1-10 branch in GitHub.
 
 .. code-block:: bash
 
   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="https://github.com/apache/airflow/archive/1.10.10.tar.gz#egg=apache-airflow" \
-    --build-arg CONSTRAINT_REQUIREMENTS="https://raw.githubusercontent.com/apache/airflow/1.10.10/requirements/requirements-python3.7.txt" \
+    --build-arg AIRFLOW_INSTALL_SOURCES="https://github.com/apache/airflow/archive/1.10.12.tar.gz#egg=apache-airflow" \
+    --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
     --build-arg AIRFLOW_BRANCH="v1-10-test" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
     --build-arg AIRFLOW_SOURCES_TO="/empty"
 
-This builds the production image in version 3.7 with default extras from 1.10.10 Pypi package and
-requirements taken from 1.10.10 tag in Github and pre-installed pip dependencies from the top
+This builds the production image in version 3.7 with default extras from 1.10.12 Pypi package and
+constraints taken from 1.10.12 tag in Github and pre-installed pip dependencies from the top
 of v1-10-test branch.
 
 .. code-block:: bash
@@ -521,15 +517,14 @@ of v1-10-test branch.
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
     --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
-    --build-arg AIRFLOW_INSTALL_VERSION="==1.10.10" \
+    --build-arg AIRFLOW_INSTALL_VERSION="==1.10.12" \
     --build-arg AIRFLOW_BRANCH="v1-10-test" \
-    --build-arg CONSTRAINT_REQUIREMENTS="https://raw.githubusercontent.com/apache/airflow/1.10.10/requirements/requirements-python3.7.txt" \
+    --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1.10.12" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
     --build-arg AIRFLOW_SOURCES_TO="/empty"
 
 This builds the production image in version 3.7 with additional airflow extras from 1.10.10 Pypi package and
-additional python dependencies and pre-installed pip dependencies from the top
-of v1-10-test branch.
+additional python dependencies and pre-installed pip dependencies from 1.10.10 tagged constraints.
 
 .. code-block:: bash
 
@@ -539,7 +534,7 @@ of v1-10-test branch.
     --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.10" \
     --build-arg AIRFLOW_BRANCH="v1-10-test" \
-    --build-arg CONSTRAINT_REQUIREMENTS="https://raw.githubusercontent.com/apache/airflow/1.10.10/requirements/requirements-python3.7.txt" \
+    --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1.10.10" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
     --build-arg AIRFLOW_SOURCES_TO="/empty" \
     --build-arg ADDITIONAL_AIRFLOW_EXTRAS="mssql,hdfs"
@@ -554,8 +549,8 @@ additional apt dev and runtime dependencies.
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
     --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
-    --build-arg AIRFLOW_INSTALL_VERSION="==1.10.10" \
-    --build-arg CONSTRAINT_REQUIREMENTS="https://raw.githubusercontent.com/apache/airflow/1.10.11/requirements/requirements-python3.7.txt" \
+    --build-arg AIRFLOW_INSTALL_VERSION="==1.10.12" \
+    --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
     --build-arg AIRFLOW_SOURCES_TO="/empty" \
     --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc"
diff --git a/INSTALL b/INSTALL
index 40fed43..7e633cb 100644
--- a/INSTALL
+++ b/INSTALL
@@ -35,11 +35,12 @@ pip install .
 python setup.py install
 
 # You can also install recommended version of the dependencies by using
-# requirements-python<PYTHON_MAJOR_MINOR_VERSION>.txt as constraint file. This is needed in case
+# constraints-<PYTHON_MAJOR_MINOR_VERSION>.txt files as constraint file. This is needed in case
 # you have problems with installing the current requirements from PyPI.
-# There are different requirements for different python versions. For example"
+# There are different constraint files for different python versions. For example:
 
-pip install . --constraint requirements/requirements-python3.7.txt
+pip install . \
+  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt"
 
 # You can also install Airflow with extras specified. The list of available extras:
 # START EXTRAS HERE
diff --git a/LOCAL_VIRTUALENV.rst b/LOCAL_VIRTUALENV.rst
index b744fc3..8a20c02 100644
--- a/LOCAL_VIRTUALENV.rst
+++ b/LOCAL_VIRTUALENV.rst
@@ -36,11 +36,11 @@ These are examples of the development options available with the local virtualen
 
 * local debugging;
 * Airflow source view;
-* autocompletion;
+* auto-completion;
 * documentation support;
 * unit tests.
 
-This document describes minimum requirements and insructions for using a standalone version of the local virtualenv.
+This document describes minimum requirements and instructions for using a standalone version of the local virtualenv.
 
 Prerequisites
 =============
@@ -118,6 +118,15 @@ To create and initialize the local virtualenv:
 
     pip install -U -e ".[devel,<OTHER EXTRAS>]" # for example: pip install -U -e ".[devel,gcp,postgres]"
 
+In case you have problems with installing airflow because some requirements are not installable, you can
+try to install it with the set of working constraints (note that there are different constraint files
+for different python versions):
+
+   .. code-block:: bash
+
+    pip install -U -e ".[devel,<OTHER EXTRAS>]" \
+        --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt"
+
 Note: when you first initialize database (the next step), you may encounter some problems.
 This is because airflow by default will try to load in example dags where some of them requires dependencies ``gcp`` and ``postgres``.
 You can solve the problem by:
diff --git a/README.md b/README.md
index 940e972..c7f3ad0 100644
--- a/README.md
+++ b/README.md
@@ -98,23 +98,24 @@ our dependencies as open as possible (in `setup.py`) so users can install differ
 if needed. This means that from time to time plain `pip install apache-airflow` will not work or will
 produce unusable Airflow installation.
 
-In order to have repeatable installation, however, starting from **Airflow 1.10.10** we also keep a set of
-"known-to-be-working" requirement files in the `requirements` folder. Those "known-to-be-working"
-requirements are per major/minor python version (3.6/3.7/3.8). You can use them as constraint files
-when installing Airflow from PyPI. Note that you have to specify correct Airflow version and python versions
-in the URL.
+In order to have repeatable installation, however, introduced in **Airflow 1.10.10** and updated in
+**Airflow 1.10.12** we also keep a set of "known-to-be-working" constraint files in the
+orphan `constraints-master` and `constraints-1-10` branches. We keep those "known-to-be-working"
+constraints files separately per major/minor python version.
+You can use them as constraint files when installing Airflow from PyPI. Note that you have to specify
+correct Airflow tag/version/branch and python versions in the URL.
 
 1. Installing just airflow:
 
 ```bash
-pip install apache-airflow==1.10.11 \
- --constraint https://raw.githubusercontent.com/apache/airflow/1.10.11/requirements/requirements-python3.7.txt
+pip install apache-airflow==1.10.12 \
+ --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-1.10.12/constraints-3.7.txt"
 ```
 
 2. Installing with extras (for example postgres,gcp)
 ```bash
 pip install apache-airflow[postgres,gcp]==1.10.11 \
- --constraint https://raw.githubusercontent.com/apache/airflow/1.10.11/requirements/requirements-python3.7.txt
+ --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-1.10.12/constraints-3.7.txt"
 ```
 
 ## Building customized production images
diff --git a/breeze b/breeze
index 571d383..27eaee0 100755
--- a/breeze
+++ b/breeze
@@ -186,7 +186,8 @@ function initialize_virtualenv() {
         echo
         pushd "${AIRFLOW_SOURCES}"
         set +e
-        pip install -e ".[devel]" --constraint "requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt"
+        pip install -e ".[devel]" \
+            --constraint "https://raw.githubusercontent.com/apache/airflow/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt"
         RES=$?
         set -e
         popd
@@ -825,9 +826,12 @@ function parse_arguments() {
           fi
           COMMAND_TO_RUN="run_docker_compose"
           ;;
-        generate-requirements)
+        generate-constraints)
           LAST_SUBCOMMAND="${1}"
-          COMMAND_TO_RUN="perform_generate_requirements"
+          COMMAND_TO_RUN="perform_generate_constraints"
+          export FORCE_ANSWER_TO_QUESTIONS="yes"
+          export FORCE_BUILD_IMAGES="true"
+          export UPGRADE_TO_LATEST_CONSTRAINTS="true"
           shift ;;
         push-image)
           LAST_SUBCOMMAND="${1}"
@@ -1010,7 +1014,7 @@ function prepare_usage() {
     export USAGE_CLEANUP_IMAGE="Cleans up the container image created"
     export USAGE_DOCKER_COMPOSE="Executes specified docker-compose command"
     export USAGE_FLAGS="Shows all breeze's flags"
-    export USAGE_GENERATE_REQUIREMENTS="Generates pinned requirements for pip dependencies"
+    export USAGE_GENERATE_CONSTRAINTS="Generates pinned constraint files"
     export USAGE_INITIALIZE_LOCAL_VIRTUALENV="Initializes local virtualenv"
     export USAGE_PUSH_IMAGE="Pushes images to registry"
     export USAGE_KIND_CLUSTER="Manages KinD cluster on the host"
@@ -1128,27 +1132,30 @@ $(flag_verbosity)
     export DETAILED_USAGE_FLAGS="
       Explains in detail all the flags that can be used with breeze.
 "
-    DETAILED_USAGE_GENERATE_REQUIREMENTS="
-${CMDNAME} generate-requirements [FLAGS]
+    # shellcheck disable=SC2089
+    DETAILED_USAGE_GENERATE_CONSTRAINTS="
+${CMDNAME} generate-constraints [FLAGS]
 
-      Generates pinned requirements from setup.py. Those requirements are generated in requirements
-      directory - separately for different python version. Those requirements are used to run
-      CI builds as well as run repeatable production image builds. You can use those requirements
-      to predictably install released Airflow versions. You should run it always after you update
-      setup.py.
+      Generates pinned constraint files from setup.py. Those files are generated in files folder
+      - separate files for different python version. Those constraint files when pushed to orphan
+      constraints-master and constraints-1-10 branches are used to generate repeatable
+      CI builds as well as run repeatable production image builds. You can use those constraints
+      to predictably install released Airflow versions. This is mainly used to test the constraint
+      generation - constraints are pushed to the orphan branches by a successful scheduled
+      CRON job in CI automatically.
 
 Flags:
 $(flag_airflow_variants)
 $(flag_verbosity)
 "
     # shellcheck disable=SC2090
-    export DETAILED_USAGE_GENERATE_REQUIREMENTS
+    export DETAILED_USAGE_GENERATE_CONSTRAINTS
     DETAILED_USAGE_INITIALIZE_LOCAL_VIRTUALENV="
 ${CMDNAME} initialize-local-virtualenv [FLAGS]
 
       Initializes locally created virtualenv installing all dependencies of Airflow
-      taking into account the frozen requirements from requirements folder.
-      This local virtualenv can be used to aid autocompletion and IDE support as
+      taking into account the constraints for the version specified.
+      This local virtualenv can be used to aid auto-completion and IDE support as
       well as run unit tests directly from the IDE. You need to have virtualenv
       activated before running this command.
 
@@ -1880,7 +1887,7 @@ function run_build_command {
                 rebuild_ci_image_if_needed
             fi
             ;;
-        build_docs|perform_static_checks|perform_generate_requirements)
+        build_docs|perform_static_checks|perform_generate_constraints)
             prepare_ci_build
             rebuild_ci_image_if_needed
             ;;
@@ -1996,8 +2003,8 @@ function run_breeze_command {
         cleanup_image)
             remove_images
             ;;
-        perform_generate_requirements)
-            run_generate_requirements
+        perform_generate_constraints)
+            run_generate_constraints
             ;;
         perform_push_image)
             if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
diff --git a/breeze-complete b/breeze-complete
index 73368b8..574b9c7 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -118,7 +118,7 @@ build-docs
 build-image
 cleanup-image
 exec
-generate-requirements
+generate-constraints
 push-image
 initialize-local-virtualenv
 setup-autocomplete
diff --git a/common/_default_branch.sh b/common/_default_branch.sh
index e4c00ec..b47d672 100644
--- a/common/_default_branch.sh
+++ b/common/_default_branch.sh
@@ -17,3 +17,4 @@
 # under the License.
 
 export DEFAULT_BRANCH="v1-10-test"
+export DEFAULT_CONSTRAINTS_BRANCH="constraints-1-10"
diff --git a/docs/installation.rst b/docs/installation.rst
index d5652cb..ed82157 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -30,33 +30,40 @@ our dependencies as open as possible (in ``setup.py``) so users can install diff
 if needed. This means that from time to time plain ``pip install apache-airflow`` will not work or will
 produce unusable Airflow installation.
 
-In order to have repeatable installation, however, starting from **Airflow 1.10.10** we also keep a set of
-"known-to-be-working" requirement files in the ``requirements`` folder. Those "known-to-be-working"
-requirements are per major/minor python version (3.6/3.7). You can use them as constraint
+In order to have repeatable installation, however, starting from **Airflow 1.10.10** and updated in
+**Airflow 1.10.12** we also keep a set of "known-to-be-working" constraint files in the
+``constraints-master`` and ``constraints-1-10`` orphan branches.
+Those "known-to-be-working" constraints are per major/minor python version. You can use them as constraint
 files when installing Airflow from PyPI. Note that you have to specify correct Airflow version
 and python versions in the URL.
 
+
+  **Prerequisites**
+
+  On Debian based Linux OS:
+
+  .. code-block:: bash
+
+      sudo apt-get update
+      sudo apt-get install build-essential
+
+
 1. Installing just airflow
 
 .. code-block:: bash
 
     pip install \
-     apache-airflow==1.10.10 \
-     --constraint \
-            https://raw.githubusercontent.com/apache/airflow/1.10.10/requirements/requirements-python3.7.txt
-
+     apache-airflow==1.10.12 \
+     --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-1.10.12/constraints-3.7.txt"
 
-You need certain system level requirements in order to install Airflow. Those are requirements that are known
-to be needed for Linux system (Tested on Ubuntu Buster LTS) :
 
 2. Installing with extras (for example postgres, gcp)
 
 .. code-block:: bash
 
     pip install \
-     apache-airflow[postgres,gcp]==1.10.10 \
-     --constraint \
-            https://raw.githubusercontent.com/apache/airflow/1.10.10/requirements/requirements-python3.7.txt
+     apache-airflow[postgres,gcp]==1.10.12 \
+     --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-1.10.12/constraints-3.7.txt"
 
 
 You need certain system level requirements in order to install Airflow. Those are requirements that are known
diff --git a/requirements/REMOVE.md b/requirements/REMOVE.md
new file mode 100644
index 0000000..e5163fb
--- /dev/null
+++ b/requirements/REMOVE.md
@@ -0,0 +1,22 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied.  See the License for the
+ specific language governing permissions and limitations
+ under the License.
+ -->
+
+This directory should be removed as soon as we release Airflow 1.10.12
+and sufficient time passes for everyone to switch to the new way of retrieving
+constraints.
diff --git a/scripts/ci/requirements/ci_generate_requirements.sh b/scripts/ci/constraints/ci_generate_constraints.sh
similarity index 97%
rename from scripts/ci/requirements/ci_generate_requirements.sh
rename to scripts/ci/constraints/ci_generate_constraints.sh
index 5cc4a0e..1b7ee4f 100755
--- a/scripts/ci/requirements/ci_generate_requirements.sh
+++ b/scripts/ci/constraints/ci_generate_constraints.sh
@@ -26,4 +26,4 @@ prepare_ci_build
 
 rebuild_ci_image_if_needed
 
-run_generate_requirements
+run_generate_constraints
diff --git a/scripts/ci/docker-compose/local.yml b/scripts/ci/docker-compose/local.yml
index 822d49d..3e1abab 100644
--- a/scripts/ci/docker-compose/local.yml
+++ b/scripts/ci/docker-compose/local.yml
@@ -49,7 +49,6 @@ services:
       - ../../../hooks:/opt/airflow/hooks:cached
       - ../../../logs:/root/airflow/logs:cached
       - ../../../pytest.ini:/opt/airflow/pytest.ini:cached
-      - ../../../requirements:/opt/airflow/requirements:cached
       - ../../../scripts:/opt/airflow/scripts:cached
       - ../../../scripts/ci/in_container/entrypoint_ci.sh:/entrypoint:cached
       - ../../../setup.cfg:/opt/airflow/setup.cfg:cached
diff --git a/scripts/ci/in_container/run_generate_constraints.sh b/scripts/ci/in_container/run_generate_constraints.sh
new file mode 100755
index 0000000..9b18c79
--- /dev/null
+++ b/scripts/ci/in_container/run_generate_constraints.sh
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# shellcheck source=scripts/ci/in_container/_in_container_script_init.sh
+. "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh"
+
+# adding trap to exiting trap
+HANDLERS="$( trap -p EXIT | cut -f2 -d \' )"
+# shellcheck disable=SC2064
+trap "${HANDLERS}${HANDLERS:+;}in_container_fix_ownership" EXIT
+
+CONSTRAINTS_DIR="/files/constraints-${PYTHON_MAJOR_MINOR_VERSION}"
+
+LATEST_CONSTRAINT_FILE="${CONSTRAINTS_DIR}/original-constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt"
+CURRENT_CONSTRAINT_FILE="${CONSTRAINTS_DIR}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt"
+
+mkdir -pv "${CONSTRAINTS_DIR}"
+
+curl "${AIRFLOW_CONSTRAINTS_URL}" --output "${LATEST_CONSTRAINT_FILE}"
+
+echo
+echo "Freezing constraints to ${CURRENT_CONSTRAINT_FILE}"
+echo
+
+pip freeze | sort | \
+    grep -v "apache_airflow" | \
+    grep -v "/opt/airflow" >"${CURRENT_CONSTRAINT_FILE}"
+
+echo
+echo "Constraints generated in ${CURRENT_CONSTRAINT_FILE}"
+echo
+
+set +e
+diff --color=always "${LATEST_CONSTRAINT_FILE}" "${CURRENT_CONSTRAINT_FILE}"
+
+exit 0
diff --git a/scripts/ci/in_container/run_generate_requirements.sh b/scripts/ci/in_container/run_generate_requirements.sh
deleted file mode 100755
index 5022e13..0000000
--- a/scripts/ci/in_container/run_generate_requirements.sh
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# shellcheck source=scripts/ci/in_container/_in_container_script_init.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh"
-
-# adding trap to exiting trap
-HANDLERS="$( trap -p EXIT | cut -f2 -d \' )"
-# shellcheck disable=SC2064
-trap "${HANDLERS}${HANDLERS:+;}in_container_fix_ownership" EXIT
-
-STORED_SETUP_PY_HASH_FILE="${AIRFLOW_SOURCES}/requirements/setup-${PYTHON_MAJOR_MINOR_VERSION}.md5"
-
-CURRENT_SETUP_PY_HASH=$(md5sum "${AIRFLOW_SOURCES}/setup.py")
-STORED_SETUP_PY_HASH=$(cat "${STORED_SETUP_PY_HASH_FILE}" 2>/dev/null || true)
-
-if [[ ${STORED_SETUP_PY_HASH} != "${CURRENT_SETUP_PY_HASH}" && ${CHECK_REQUIREMENTS_ONLY:=} == "true" ]]; then
-    echo
-    echo "ERROR! Setup.py changed since last time requirements were generated"
-    echo
-    echo "     When you update setup.py, you have to run"
-    echo
-    echo "           breeze generate-requirements --python ${PYTHON_MAJOR_MINOR_VERSION}"
-    echo
-    echo
-    exit 1
-fi
-
-# Upgrading requirements will happen only in CRON job to see that we have some
-# new requirements released
-if [[ ${UPGRADE_WHILE_GENERATING_REQUIREMENTS} == "true" ]]; then
-    echo
-    echo "Upgrading requirements to latest ones"
-    echo
-    pip install -e ".[${AIRFLOW_EXTRAS}]" --upgrade --upgrade-strategy eager
-fi
-
-OLD_REQUIREMENTS_FILE="/tmp/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt"
-GENERATED_REQUIREMENTS_FILE="${AIRFLOW_SOURCES}/requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt"
-
-echo
-echo "Copying requirements ${GENERATED_REQUIREMENTS_FILE} -> ${OLD_REQUIREMENTS_FILE}"
-echo
-cp "${GENERATED_REQUIREMENTS_FILE}" "${OLD_REQUIREMENTS_FILE}"
-
-echo
-echo "Freezing requirements to ${GENERATED_REQUIREMENTS_FILE}"
-echo
-
-pip freeze | sort | \
-    grep -v "apache_airflow" | \
-    grep -v "/opt/airflow" >"${GENERATED_REQUIREMENTS_FILE}"
-
-echo
-echo "Requirements generated in ${GENERATED_REQUIREMENTS_FILE}"
-echo
-
-echo
-echo "Storing setup.py hash in ${STORED_SETUP_PY_HASH_FILE}"
-echo
-echo "${CURRENT_SETUP_PY_HASH}" > "${STORED_SETUP_PY_HASH_FILE}"
-
-set +e
-diff --color=always "${OLD_REQUIREMENTS_FILE}" "${GENERATED_REQUIREMENTS_FILE}"
-
-exit 0
diff --git a/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh b/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh
index 3d8194a..bb5a31e 100755
--- a/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh
+++ b/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh
@@ -87,10 +87,11 @@ fi
 . "${VIRTUALENV_PATH}/bin/activate"
 
 pip install pytest freezegun pytest-cov \
-    --constraint "requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt"
+  --constraint "https://raw.githubusercontent.com/apache/airflow/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt"
+
 
 pip install -e ".[kubernetes]" \
-    --constraint "requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt"
+  --constraint "https://raw.githubusercontent.com/apache/airflow/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt"
 
 if [[ ${INTERACTIVE} == "true" ]]; then
     echo
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index 01eac17..9020173 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -29,26 +29,27 @@ function add_build_args_for_remote_install() {
     )
     if [[ ${AIRFLOW_VERSION} =~ [^0-9]*1[^0-9]*10[^0-9]([0-9]*) ]]; then
         # All types of references/versions match this regexp for 1.10 series
-        # for example v1_10_test, 1.10.10, 1.10.9 etc. ${BASH_REMATCH[1]} is the () group matches last
+        # for example v1_10_test, 1.10.10, 1.10.9 etc. ${BASH_REMATCH[1]} matches last
         # minor digit of version and it's length is 0 for v1_10_test, 1 for 1.10.9 and 2 for 1.10.10+
-        if [[ ${#BASH_REMATCH[1]} == "1" ]]; then
-            # This is only for 1.10.0 - 1.10.9
+        AIRFLOW_MINOR_VERSION_NUMBER=${BASH_REMATCH[1]}
+        if [[ ${#AIRFLOW_MINOR_VERSION_NUMBER} == "0" ]]; then
+            # For v1_10_* branches use constraints-1-10 branch
             EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-                "--build-arg" "CONSTRAINT_REQUIREMENTS=https://raw.githubusercontent.com/apache/airflow/1.10.10/requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt"
+                "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=constraints-1-10"
             )
         else
             EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-                # For 1.10.10+ and v1-10-test it's ok to use AIRFLOW_VERSION as reference
-                "--build-arg" "CONSTRAINT_REQUIREMENTS=https://raw.githubusercontent.com/apache/airflow/${AIRFLOW_VERSION}/requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt"
+                # For specified minor version of 1.10 use specific reference constraints
+                "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION}"
             )
         fi
         AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="v1-10-test"
     else
-        # For all other (master, 2.0+) we just match ${AIRFLOW_VERSION}
+        # For all other (master, 2.0+) we just get the default constraint branch
         EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-            "--build-arg" "CONSTRAINT_REQUIREMENTS=https://raw.githubusercontent.com/apache/airflow/${AIRFLOW_VERSION}/requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt"
+            "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=${DEFAULT_CONSTRAINTS_BRANCH}"
         )
-        AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="master"
+        AIRFLOW_BRANCH_FOR_PYPI_PRELOADING=${DEFAULT_BRANCH}
     fi
 }
 
@@ -485,7 +486,7 @@ function rebuild_ci_image_if_needed_and_confirmed() {
 }
 
 # Determines the strategy to be used for caching based on the type of CI job run.
-# In case of CRON jobs we run builds without cache and upgrade to latest requirements
+# In case of CRON jobs we run builds without cache and upgrade constraint files to latest
 function determine_cache_strategy() {
     if [[ "${CI_EVENT_TYPE:=}" == "schedule" ]]; then
         echo
@@ -493,18 +494,14 @@ function determine_cache_strategy() {
         echo
         export DOCKER_CACHE="disabled"
         echo
-        echo "Requirements are upgraded to latest for scheduled CI build"
-        echo
-        export UPGRADE_TO_LATEST_REQUIREMENTS="true"
     else
         echo
         echo "Pull cache used for regular CI builds"
         echo
         export DOCKER_CACHE="pulled"
         echo
-        echo "Requirements are not upgraded to latest ones for regular CI builds"
+        echo "Constraints are not upgraded to latest ones for regular CI builds"
         echo
-        export UPGRADE_TO_LATEST_REQUIREMENTS="false"
     fi
 }
 
@@ -571,14 +568,15 @@ Docker building ${AIRFLOW_CI_IMAGE}.
     verbose_docker build \
         --build-arg PYTHON_BASE_IMAGE="${PYTHON_BASE_IMAGE}" \
         --build-arg PYTHON_MAJOR_MINOR_VERSION="${PYTHON_MAJOR_MINOR_VERSION}" \
-            --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
+        --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
         --build-arg AIRFLOW_BRANCH="${BRANCH_NAME}" \
         --build-arg AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS}" \
         --build-arg ADDITIONAL_AIRFLOW_EXTRAS="${ADDITIONAL_AIRFLOW_EXTRAS}" \
         --build-arg ADDITIONAL_PYTHON_DEPS="${ADDITIONAL_PYTHON_DEPS}" \
         --build-arg ADDITIONAL_DEV_DEPS="${ADDITIONAL_DEV_DEPS}" \
         --build-arg ADDITIONAL_RUNTIME_DEPS="${ADDITIONAL_RUNTIME_DEPS}" \
-        --build-arg UPGRADE_TO_LATEST_REQUIREMENTS="${UPGRADE_TO_LATEST_REQUIREMENTS}" \
+        --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
+        --build-arg UPGRADE_TO_LATEST_CONSTRAINTS="${UPGRADE_TO_LATEST_CONSTRAINTS}" \
         "${DOCKER_CACHE_CI_DIRECTIVE[@]}" \
         -t "${AIRFLOW_CI_IMAGE}" \
         --target "main" \
@@ -727,6 +725,7 @@ function build_prod_image() {
         --build-arg ADDITIONAL_AIRFLOW_EXTRAS="${ADDITIONAL_AIRFLOW_EXTRAS}" \
         --build-arg ADDITIONAL_PYTHON_DEPS="${ADDITIONAL_PYTHON_DEPS}" \
         --build-arg ADDITIONAL_DEV_DEPS="${ADDITIONAL_DEV_DEPS}" \
+        --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="${DEFAULT_CONSTRAINTS_BRANCH}" \
         "${DOCKER_CACHE_PROD_BUILD_DIRECTIVE[@]}" \
         -t "${AIRFLOW_PROD_BUILD_IMAGE}" \
         --target "airflow-build-image" \
@@ -743,6 +742,7 @@ function build_prod_image() {
         --build-arg AIRFLOW_BRANCH="${AIRFLOW_BRANCH_FOR_PYPI_PRELOADING}" \
         --build-arg AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS}" \
         --build-arg EMBEDDED_DAGS="${EMBEDDED_DAGS}" \
+        --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="${DEFAULT_CONSTRAINTS_BRANCH}" \
         "${DOCKER_CACHE_PROD_DIRECTIVE[@]}" \
         -t "${AIRFLOW_PROD_IMAGE}" \
         --target "main" \
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index e759b03..d3224e7 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -157,9 +157,9 @@ function initialize_common_environment {
         done
     fi
 
-    # By default we are not upgrading to latest requirements when building Docker CI image
+    # By default we are not upgrading to latest version of constraints when building Docker CI image
     # This will only be done in cron jobs
-    export UPGRADE_TO_LATEST_REQUIREMENTS=${UPGRADE_TO_LATEST_REQUIREMENTS:="false"}
+    export UPGRADE_TO_LATEST_CONSTRAINTS=${UPGRADE_TO_LATEST_CONSTRAINTS:="false"}
 
     # In case of MacOS we need to use gstat - gnu version of the stats
     export STAT_BIN=stat
@@ -174,23 +174,13 @@ function initialize_common_environment {
     # default version of python used to tag the "master" and "latest" images in DockerHub
     export DEFAULT_PYTHON_MAJOR_MINOR_VERSION=3.6
 
-    # In case we are not in CI - we assume we run locally. There are subtle changes if you run
-    # CI scripts locally - for example requirements are eagerly updated if you do local run
-    # in generate requirements
+    # In case we are not in CI - we assume we run locally.
     if [[ ${CI:="false"} == "true" ]]; then
         export LOCAL_RUN="false"
     else
         export LOCAL_RUN="true"
     fi
 
-    # eager upgrade while generating requirements should only happen in locally run
-    # pre-commits or in cron job
-    if [[ ${LOCAL_RUN} == "true" ]]; then
-        export UPGRADE_WHILE_GENERATING_REQUIREMENTS="true"
-    else
-        export UPGRADE_WHILE_GENERATING_REQUIREMENTS=${UPGRADE_WHILE_GENERATING_REQUIREMENTS:="false"}
-    fi
-
     # Default extras used for building CI image
     export DEFAULT_CI_EXTRAS="devel_ci"
 
diff --git a/scripts/ci/libraries/_local_mounts.sh b/scripts/ci/libraries/_local_mounts.sh
index 127ebb3..da16744 100644
--- a/scripts/ci/libraries/_local_mounts.sh
+++ b/scripts/ci/libraries/_local_mounts.sh
@@ -45,7 +45,6 @@ function generate_local_mounts_list {
         "$prefix"hooks:/opt/airflow/hooks:cached
         "$prefix"logs:/root/airflow/logs:cached
         "$prefix"pytest.ini:/opt/airflow/pytest.ini:cached
-        "$prefix"requirements:/opt/airflow/requirements:cached
         "$prefix"scripts:/opt/airflow/scripts:cached
         "$prefix"scripts/ci/in_container/entrypoint_ci.sh:/entrypoint:cached
         "$prefix"setup.cfg:/opt/airflow/setup.cfg:cached
diff --git a/scripts/ci/libraries/_runs.sh b/scripts/ci/libraries/_runs.sh
index 76b674d..ef18af8 100644
--- a/scripts/ci/libraries/_runs.sh
+++ b/scripts/ci/libraries/_runs.sh
@@ -34,8 +34,8 @@ function run_docs() {
             | tee -a "${OUTPUT_LOG}"
 }
 
-# Docker command to generate constraint requirement files.
-function run_generate_requirements() {
+# Docker command to generate constraint files.
+function run_generate_constraints() {
     docker run "${EXTRA_DOCKER_FLAGS[@]}" \
         --entrypoint "/usr/local/bin/dumb-init"  \
         --env PYTHONDONTWRITEBYTECODE \
@@ -46,11 +46,9 @@ function run_generate_requirements() {
         --env HOST_OS="$(uname -s)" \
         --env HOST_HOME="${HOME}" \
         --env HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" \
-        --env UPGRADE_WHILE_GENERATING_REQUIREMENTS \
         --env PYTHON_MAJOR_MINOR_VERSION \
-        --env CHECK_REQUIREMENTS_ONLY \
         --rm \
         "${AIRFLOW_CI_IMAGE}" \
-        "--" "/opt/airflow/scripts/ci/in_container/run_generate_requirements.sh" \
+        "--" "/opt/airflow/scripts/ci/in_container/run_generate_constraints.sh" \
         | tee -a "${OUTPUT_LOG}"
 }
diff --git a/scripts/ci/pre_commit/pre_commit_generate_requirements.sh b/scripts/ci/pre_commit/pre_commit_generate_requirements.sh
deleted file mode 100755
index d0c2deb..0000000
--- a/scripts/ci/pre_commit/pre_commit_generate_requirements.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-export FORCE_ANSWER_TO_QUESTIONS=${FORCE_ANSWER_TO_QUESTIONS:="quit"}
-export REMEMBER_LAST_ANSWER="true"
-
-export PYTHON_MAJOR_MINOR_VERSION="${1}"
-
-# shellcheck source=scripts/ci/requirements/ci_generate_requirements.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/../generate_requirements/ci_generate_requirements.sh"
diff --git a/scripts/ci/static_checks/ci_run_static_checks.sh b/scripts/ci/static_checks/ci_run_static_checks.sh
index 6b7f124..e7cf6f2 100755
--- a/scripts/ci/static_checks/ci_run_static_checks.sh
+++ b/scripts/ci/static_checks/ci_run_static_checks.sh
@@ -33,6 +33,9 @@ prepare_ci_build
 
 rebuild_ci_image_if_needed
 
+python -m pip install pre-commit \
+  --constraint "https://raw.githubusercontent.com/apache/airflow/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt"
+
 if [[ $# == "0" ]]; then
     pre-commit run --all-files --show-diff-on-failure --color always
 else
diff --git a/scripts/ci/tools/ci_check_if_tests_should_be_run.sh b/scripts/ci/tools/ci_check_if_tests_should_be_run.sh
index 5a51048..0b8a7b1 100755
--- a/scripts/ci/tools/ci_check_if_tests_should_be_run.sh
+++ b/scripts/ci/tools/ci_check_if_tests_should_be_run.sh
@@ -26,7 +26,6 @@ CHANGED_FILES_PATTERNS=(
     "^scripts"
     "^chart"
     "^setup.py"
-    "^requirements"
     "^tests"
     "^kubernetes_tests"
 )