Posted to commits@airflow.apache.org by po...@apache.org on 2021/03/23 03:26:05 UTC

[airflow] 18/34: Prepare to switch master branch for main. (#14688)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 22ad5d9bc50400952a6b222bde71c9f8438b772b
Author: Ash Berlin-Taylor <as...@firemirror.com>
AuthorDate: Mon Mar 15 16:44:35 2021 +0000

    Prepare to switch master branch for main. (#14688)
    
    There are many more references to "master" (even in our own repo) than
    this, but this commit is the first step in that process.
    
    It makes CI run on the main branch (once it exists) and re-words a few
    cases where we can easily avoid referring to master.
    
    This doesn't yet re-name the `constraints-master` or `master-*` images -
    that will be done in a future PR.
    
    (We won't be able to entirely eliminate "master" from our repo, as we
    refer to a lot of other GitHub repos that we can't change.)
    
    (cherry picked from commit 0dea083fcb01a239fa78efb04283bbfb60d88f5e)
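
For context, the rename itself lands later; once upstream switches to main, a
contributor would typically migrate a local clone with standard git commands
like these (a minimal sketch, assuming a remote named "origin" and that
origin/main already exists upstream; not part of this commit):

    git fetch origin                                 # pick up the new main branch
    git branch -m master main                        # rename the local branch
    git branch --set-upstream-to=origin/main main    # retarget its upstream
    git remote set-head origin -a                    # refresh origin/HEAD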
---
 .asf.yaml                                          |  3 ++
 .github/workflows/codeql-analysis.yml              |  2 +-
 .github/workflows/scheduled_quarantined.yml        |  2 +-
 Dockerfile                                         |  6 ++--
 Dockerfile.ci                                      | 10 +++---
 chart/templates/create-user-job.yaml               |  2 +-
 chart/templates/migrate-database-job.yaml          |  2 +-
 codecov.yml                                        |  2 ++
 docs/README.rst                                    |  2 +-
 docs/conf.py                                       |  8 ++---
 scripts/ci/constraints/ci_branch_constraints.sh    |  4 ++-
 scripts/ci/libraries/_build_images.sh              |  4 +--
 scripts/ci/libraries/_initialization.sh            |  4 +--
 scripts/ci/selective_ci_checks.sh                  |  6 ++--
 ...aster.sh => install_airflow_from_branch_tip.sh} | 10 +++---
 tests/models/test_dagbag.py                        | 40 +++++++++++-----------
 16 files changed, 57 insertions(+), 50 deletions(-)

diff --git a/.asf.yaml b/.asf.yaml
index 6b1bb15..a9e6961 100644
--- a/.asf.yaml
+++ b/.asf.yaml
@@ -44,6 +44,9 @@ github:
     master:
       required_pull_request_reviews:
         required_approving_review_count: 1
+    main:
+      required_pull_request_reviews:
+        required_approving_review_count: 1
     v1-10-stable:
       required_pull_request_reviews:
         required_approving_review_count: 1
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index c39aa68..0c35b148 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -20,7 +20,7 @@ name: "CodeQL"
 
 on:  # yamllint disable-line rule:truthy
   push:
-    branches: [master]
+    branches: [master, main]
   schedule:
     - cron: '0 2 * * *'
 
diff --git a/.github/workflows/scheduled_quarantined.yml b/.github/workflows/scheduled_quarantined.yml
index 2f7f05f..5181b02 100644
--- a/.github/workflows/scheduled_quarantined.yml
+++ b/.github/workflows/scheduled_quarantined.yml
@@ -72,7 +72,7 @@ jobs:
         with:
           python-version: '3.7'
       - name: "Set issue id for master"
-        if: github.ref == 'refs/heads/master'
+        if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main'
         run: |
           echo "ISSUE_ID=10118" >> $GITHUB_ENV
       - name: "Set issue id for v1-10-stable"
diff --git a/Dockerfile b/Dockerfile
index 04ac076..a62ce15 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -204,8 +204,8 @@ ENV AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES}
 ARG INSTALL_PROVIDERS_FROM_SOURCES="false"
 ENV INSTALL_PROVIDERS_FROM_SOURCES=${INSTALL_PROVIDERS_FROM_SOURCES}
 
-# Only copy install_airflow_from_latest_master.sh to not invalidate cache on other script changes
-COPY scripts/docker/install_airflow_from_latest_master.sh /scripts/docker/install_airflow_from_latest_master.sh
+# Only copy install_airflow_from_branch_tip.sh to not invalidate cache on other script changes
+COPY scripts/docker/install_airflow_from_branch_tip.sh /scripts/docker/install_airflow_from_branch_tip.sh
 
 # By default we do not upgrade to latest dependencies
 ARG UPGRADE_TO_NEWER_DEPENDENCIES="false"
@@ -219,7 +219,7 @@ ENV UPGRADE_TO_NEWER_DEPENDENCIES=${UPGRADE_TO_NEWER_DEPENDENCIES}
 # account for removed dependencies (we do not install them in the first place)
 RUN if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" && \
           ${UPGRADE_TO_NEWER_DEPENDENCIES} == "false" ]]; then \
-        bash /scripts/docker/install_airflow_from_latest_master.sh; \
+        bash /scripts/docker/install_airflow_from_branch_tip.sh; \
     fi
 
 # By default we install latest airflow from PyPI so we do not need to copy sources of Airflow
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 0231196..9629621 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -241,12 +241,12 @@ RUN echo "Installing with extras: ${AIRFLOW_EXTRAS}."
 ARG CONSTRAINTS_GITHUB_REPOSITORY="apache/airflow"
 ENV CONSTRAINTS_GITHUB_REPOSITORY=${CONSTRAINTS_GITHUB_REPOSITORY}
 
-ARG AIRFLOW_CONSTRAINTS_REFERENCE="constraints-master"
+ARG AIRFLOW_CONSTRAINTS_REFERENCE="constraints-${AIRFLOW_BRANCH}"
 ARG AIRFLOW_CONSTRAINTS="constraints"
 ARG AIRFLOW_CONSTRAINTS_LOCATION="https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${AIRFLOW_CONSTRAINTS_REFERENCE}/${AIRFLOW_CONSTRAINTS}-${PYTHON_MAJOR_MINOR_VERSION}.txt"
 ENV AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION}
 
-# By changing the CI build epoch we can force reinstalling Airflow from the current master
+# By changing the CI build epoch we can force reinstalling Airflow and pip all dependencies
 # It can also be overwritten manually by setting the AIRFLOW_CI_BUILD_EPOCH environment variable.
 ARG AIRFLOW_CI_BUILD_EPOCH="3"
 ENV AIRFLOW_CI_BUILD_EPOCH=${AIRFLOW_CI_BUILD_EPOCH}
@@ -292,8 +292,8 @@ ENV PIP_PROGRESS_BAR=${PIP_PROGRESS_BAR}
 
 RUN pip install --no-cache-dir --upgrade "pip==${AIRFLOW_PIP_VERSION}"
 
-# Only copy install_airflow_from_latest_master.sh to not invalidate cache on other script changes
-COPY scripts/docker/install_airflow_from_latest_master.sh /scripts/docker/install_airflow_from_latest_master.sh
+# Only copy install_airflow_from_branch_tip.sh to not invalidate cache on other script changes
+COPY scripts/docker/install_airflow_from_branch_tip.sh /scripts/docker/install_airflow_from_branch_tip.sh
 
 ARG UPGRADE_TO_NEWER_DEPENDENCIES="false"
 ENV UPGRADE_TO_NEWER_DEPENDENCIES=${UPGRADE_TO_NEWER_DEPENDENCIES}
@@ -307,7 +307,7 @@ ENV UPGRADE_TO_NEWER_DEPENDENCIES=${UPGRADE_TO_NEWER_DEPENDENCIES}
 # account for removed dependencies (we do not install them in the first place)
 RUN if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" && \
           ${UPGRADE_TO_NEWER_DEPENDENCIES} == "false" ]]; then \
-        bash /scripts/docker/install_airflow_from_latest_master.sh; \
+        bash /scripts/docker/install_airflow_from_branch_tip.sh; \
     fi
 
 # Generate random hex dump file so that we can determine whether it's faster to rebuild the image
diff --git a/chart/templates/create-user-job.yaml b/chart/templates/create-user-job.yaml
index 30b6c25..3a3aa3f 100644
--- a/chart/templates/create-user-job.yaml
+++ b/chart/templates/create-user-job.yaml
@@ -67,7 +67,7 @@ spec:
           args:
             - "bash"
             - "-c"
-            # Support running against 1.10.x and 2.0.0dev/master
+            # Support running against 1.10.x and 2.x
             - 'airflow users create "$@" || airflow create_user "$@"'
             - --
             - "-r"
diff --git a/chart/templates/migrate-database-job.yaml b/chart/templates/migrate-database-job.yaml
index 975c46b..bbcc67a 100644
--- a/chart/templates/migrate-database-job.yaml
+++ b/chart/templates/migrate-database-job.yaml
@@ -63,7 +63,7 @@ spec:
         - name: run-airflow-migrations
           image: {{ template "airflow_image" . }}
           imagePullPolicy: {{ .Values.images.airflow.pullPolicy }}
-          # Support running against 1.10.x and 2.0.0dev/master
+          # Support running against 1.10.x and 2.x
           args: ["bash", "-c", "airflow db upgrade || airflow upgradedb"]
           envFrom:
           {{- include "custom_airflow_environment_from" . | default "\n  []" | indent 10 }}
diff --git a/codecov.yml b/codecov.yml
index 518e4fb..d68d7a8 100644
--- a/codecov.yml
+++ b/codecov.yml
@@ -37,6 +37,7 @@ coverage:
         # advanced
         branches:
           - master
+          - main
           - v1-10-stable
           - v1-10-test
           - v2-0-test
@@ -53,6 +54,7 @@ coverage:
         # advanced
         branches:
           - master
+          - main
           - v1-10-stable
           - v1-10-test
           - v2-0-test
diff --git a/docs/README.rst b/docs/README.rst
index 4bb140c..5ea38ad 100644
--- a/docs/README.rst
+++ b/docs/README.rst
@@ -25,7 +25,7 @@ For Helm Chart, see: `/chart/README.md <../chart/READMe.md>`__
 Development documentation preview
 ==================================
 
-Documentation from the ``master`` branch is built and automatically published: `s.apache.org/airflow-docs <https://s.apache.org/airflow-docs>`_
+Documentation from the development version is built and automatically published: `s.apache.org/airflow-docs <https://s.apache.org/airflow-docs>`_
 
 Documentation for your PRs is available as downloadable artifact in GitHub Actions after the CI builds your PR.
 
diff --git a/docs/conf.py b/docs/conf.py
index c68f6ea..2a4ca2b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -76,10 +76,10 @@ elif PACKAGE_NAME.startswith('apache-airflow-providers-'):
     except StopIteration:
         raise Exception(f"Could not find provider.yaml file for package: {PACKAGE_NAME}")
     PACKAGE_DIR = CURRENT_PROVIDER['package-dir']
-    PACKAGE_VERSION = 'master'
+    PACKAGE_VERSION = 'devel'
 else:
     PACKAGE_DIR = None
-    PACKAGE_VERSION = 'master'
+    PACKAGE_VERSION = 'devel'
 # Adds to environment variables for easy access from other plugins like airflow_intersphinx.
 os.environ['AIRFLOW_PACKAGE_NAME'] = PACKAGE_NAME
 if PACKAGE_DIR:
@@ -302,8 +302,8 @@ html_context = {
     'conf_py_path': f'/docs/{PACKAGE_NAME}/',
     'github_user': 'apache',
     'github_repo': 'airflow',
-    'github_version': 'master',
-    'display_github': 'master',
+    'github_version': 'devel',
+    'display_github': 'devel',
     'suffix': '.rst',
 }
 
diff --git a/scripts/ci/constraints/ci_branch_constraints.sh b/scripts/ci/constraints/ci_branch_constraints.sh
index 6be2d84..1f733d4 100755
--- a/scripts/ci/constraints/ci_branch_constraints.sh
+++ b/scripts/ci/constraints/ci_branch_constraints.sh
@@ -18,7 +18,9 @@
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
-if [[ ${GITHUB_REF} == 'refs/heads/master' ]]; then
+if [[ ${GITHUB_REF} == 'refs/heads/main' ]]; then
+  echo "::set-output name=branch::constraints-main"
+elif [[ ${GITHUB_REF} == 'refs/heads/master' ]]; then
   echo "::set-output name=branch::constraints-master"
 elif [[ ${GITHUB_REF} == 'refs/heads/v1-10-test' ]]; then
   echo "::set-output name=branch::constraints-1-10"
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index 27beb74..8fe0983 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -124,7 +124,7 @@ function build_images::forget_last_answer() {
 function build_images::confirm_via_terminal() {
     echo >"${DETECTED_TERMINAL}"
     echo >"${DETECTED_TERMINAL}"
-    echo "${COLOR_YELLOW}WARNING:Make sure that you rebased to latest master before rebuilding!${COLOR_RESET}" >"${DETECTED_TERMINAL}"
+    echo "${COLOR_YELLOW}WARNING:Make sure that you rebased to latest upstream before rebuilding!${COLOR_RESET}" >"${DETECTED_TERMINAL}"
     echo >"${DETECTED_TERMINAL}"
     # Make sure to use output of tty rather than stdin/stdout when available - this way confirm
     # will works also in case of pre-commits (git does not pass stdin/stdout to pre-commit hooks)
@@ -175,7 +175,7 @@ function build_images::confirm_image_rebuild() {
     elif [[ -t 0 ]]; then
         echo
         echo
-        echo "${COLOR_YELLOW}WARNING:Make sure that you rebased to latest master before rebuilding!${COLOR_RESET}"
+        echo "${COLOR_YELLOW}WARNING:Make sure that you rebased to latest upstream before rebuilding!${COLOR_RESET}"
         echo
         # Check if this script is run interactively with stdin open and terminal attached
         "${AIRFLOW_SOURCES}/confirm" "${ACTION} image ${THE_IMAGE_TYPE}-python${PYTHON_MAJOR_MINOR_VERSION}"
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index 8634635..13132ce 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -195,7 +195,7 @@ function initialization::initialize_files_for_rebuild_check() {
         "scripts/docker/compile_www_assets.sh"
         "scripts/docker/install_additional_dependencies.sh"
         "scripts/docker/install_airflow.sh"
-        "scripts/docker/install_airflow_from_latest_master.sh"
+        "scripts/docker/install_airflow_from_branch_tip.sh"
         "scripts/docker/install_from_docker_context_files.sh"
         "scripts/docker/install_mysql.sh"
         "airflow/www/package.json"
@@ -314,7 +314,7 @@ function initialization::initialize_image_build_variables() {
     # Default build id
     export CI_BUILD_ID="${CI_BUILD_ID:="0"}"
 
-    # Default extras used for building Production image. The master of this information is in the Dockerfile
+    # Default extras used for building Production image. The canonical source of this information is in the Dockerfile
     DEFAULT_PROD_EXTRAS=$(grep "ARG AIRFLOW_EXTRAS=" "${AIRFLOW_SOURCES}/Dockerfile" |
         awk 'BEGIN { FS="=" } { print $2 }' | tr -d '"')
     export DEFAULT_PROD_EXTRAS
diff --git a/scripts/ci/selective_ci_checks.sh b/scripts/ci/selective_ci_checks.sh
index 68adf9e..456fcef 100755
--- a/scripts/ci/selective_ci_checks.sh
+++ b/scripts/ci/selective_ci_checks.sh
@@ -64,7 +64,7 @@ function output_all_basic_variables() {
     else
         initialization::ga_output python-versions \
             "$(initialization::parameters_to_json "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}")"
-        # this will work as long as DEFAULT_PYTHON_MAJOR_VERSION is the same master/v1-10
+        # this will work as long as DEFAULT_PYTHON_MAJOR_VERSION is the same on HEAD and v1-10
         # all-python-versions are used in BuildImage Workflow
         initialization::ga_output all-python-versions \
             "$(initialization::parameters_to_json "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}")"
@@ -611,11 +611,11 @@ upgrade_to_newer_dependencies="false"
 
 if (($# < 1)); then
     echo
-    echo "No Commit SHA - running all tests (likely direct master merge, or scheduled run)!"
+    echo "No Commit SHA - running all tests (likely direct merge, or scheduled run)!"
     echo
     INCOMING_COMMIT_SHA=""
     readonly INCOMING_COMMIT_SHA
-    # override FULL_TESTS_NEEDED_LABEL in master/scheduled run
+    # override FULL_TESTS_NEEDED_LABEL in main/scheduled run
     FULL_TESTS_NEEDED_LABEL="true"
     readonly FULL_TESTS_NEEDED_LABEL
     output_all_basic_variables
diff --git a/scripts/docker/install_airflow_from_latest_master.sh b/scripts/docker/install_airflow_from_branch_tip.sh
similarity index 87%
rename from scripts/docker/install_airflow_from_latest_master.sh
rename to scripts/docker/install_airflow_from_branch_tip.sh
index bf43215..3741055 100755
--- a/scripts/docker/install_airflow_from_latest_master.sh
+++ b/scripts/docker/install_airflow_from_branch_tip.sh
@@ -17,7 +17,7 @@
 # under the License.
 # shellcheck disable=SC2086
 
-# Installs Airflow from latest master. This is pure optimisation. It is done because we do not want
+# Installs Airflow from $AIRFLOW_BRANCH tip. This is pure optimisation. It is done because we do not want
 # to reinstall all dependencies from scratch when setup.py changes. Problem with Docker caching is that
 # when a file is changed, when added to docker context, it invalidates the cache and it causes Docker
 # build to reinstall all dependencies from scratch. This can take a loooooot of time. Therefore we install
@@ -37,14 +37,14 @@ test -v AIRFLOW_PIP_VERSION
 
 set -x
 
-function install_airflow_from_latest_master() {
+function install_airflow_from_branch_tip() {
     echo
-    echo Installing airflow from latest master. It is used to cache dependencies
+    echo "Installing airflow from ${AIRFLOW_BRANCH}. It is used to cache dependencies"
     echo
     if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then
        AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,}
     fi
-    # Install latest master set of dependencies using constraints \
+    # Install latest set of dependencies using constraints
     pip install ${AIRFLOW_INSTALL_USER_FLAG} \
       "https://github.com/${AIRFLOW_REPO}/archive/${AIRFLOW_BRANCH}.tar.gz#egg=apache-airflow[${AIRFLOW_EXTRAS}]" \
       --constraint "${AIRFLOW_CONSTRAINTS_LOCATION}"
@@ -57,4 +57,4 @@ function install_airflow_from_latest_master() {
     pip uninstall --yes apache-airflow
 }
 
-install_airflow_from_latest_master
+install_airflow_from_branch_tip
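
The script above is a Docker layer-caching optimisation: Airflow is installed
from the branch tarball purely to pull its dependency set into the cached
layer, then uninstalled so the real sources can be installed later without
re-resolving everything. Stripped of the build args, the core pattern looks
like this (a minimal sketch with the main branch, the postgres extra, and the
Python 3.7 constraints file substituted for the ARG values used above):

    # Pre-cache dependencies by installing airflow from the branch tip,
    # pinned to the matching constraints file
    pip install \
      "https://github.com/apache/airflow/archive/main.tar.gz#egg=apache-airflow[postgres]" \
      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.7.txt"
    # Remove airflow itself; its dependencies stay installed, so a later
    # install of the local sources reuses the warm layer
    pip uninstall --yes apache-airflow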
diff --git a/tests/models/test_dagbag.py b/tests/models/test_dagbag.py
index 6c6b1cb..95d0e79 100644
--- a/tests/models/test_dagbag.py
+++ b/tests/models/test_dagbag.py
@@ -323,27 +323,27 @@ class TestDagBag(unittest.TestCase):
             from airflow.operators.dummy import DummyOperator
             from airflow.operators.subdag import SubDagOperator
 
-            dag_name = 'master'
+            dag_name = 'parent'
             default_args = {'owner': 'owner1', 'start_date': datetime.datetime(2016, 1, 1)}
             dag = DAG(dag_name, default_args=default_args)
 
-            # master:
+            # parent:
             #     A -> opSubDag_0
-            #          master.opsubdag_0:
+            #          parent.opsubdag_0:
             #              -> subdag_0.task
             #     A -> opSubDag_1
-            #          master.opsubdag_1:
+            #          parent.opsubdag_1:
             #              -> subdag_1.task
 
             with dag:
 
                 def subdag_0():
-                    subdag_0 = DAG('master.op_subdag_0', default_args=default_args)
+                    subdag_0 = DAG('parent.op_subdag_0', default_args=default_args)
                     DummyOperator(task_id='subdag_0.task', dag=subdag_0)
                     return subdag_0
 
                 def subdag_1():
-                    subdag_1 = DAG('master.op_subdag_1', default_args=default_args)
+                    subdag_1 = DAG('parent.op_subdag_1', default_args=default_args)
                     DummyOperator(task_id='subdag_1.task', dag=subdag_1)
                     return subdag_1
 
@@ -374,58 +374,58 @@ class TestDagBag(unittest.TestCase):
             from airflow.operators.dummy import DummyOperator
             from airflow.operators.subdag import SubDagOperator
 
-            dag_name = 'master'
+            dag_name = 'parent'
             default_args = {'owner': 'owner1', 'start_date': datetime.datetime(2016, 1, 1)}
             dag = DAG(dag_name, default_args=default_args)
 
-            # master:
+            # parent:
             #     A -> op_subdag_0
-            #          master.op_subdag_0:
+            #          parent.op_subdag_0:
             #              -> opSubDag_A
-            #                 master.op_subdag_0.opSubdag_A:
+            #                 parent.op_subdag_0.opSubdag_A:
             #                     -> subdag_a.task
             #              -> opSubdag_B
-            #                 master.op_subdag_0.opSubdag_B:
+            #                 parent.op_subdag_0.opSubdag_B:
             #                     -> subdag_b.task
             #     A -> op_subdag_1
-            #          master.op_subdag_1:
+            #          parent.op_subdag_1:
             #              -> opSubdag_C
-            #                 master.op_subdag_1.opSubdag_C:
+            #                 parent.op_subdag_1.opSubdag_C:
             #                     -> subdag_c.task
             #              -> opSubDag_D
-            #                 master.op_subdag_1.opSubdag_D:
+            #                 parent.op_subdag_1.opSubdag_D:
             #                     -> subdag_d.task
 
             with dag:
 
                 def subdag_a():
-                    subdag_a = DAG('master.op_subdag_0.opSubdag_A', default_args=default_args)
+                    subdag_a = DAG('parent.op_subdag_0.opSubdag_A', default_args=default_args)
                     DummyOperator(task_id='subdag_a.task', dag=subdag_a)
                     return subdag_a
 
                 def subdag_b():
-                    subdag_b = DAG('master.op_subdag_0.opSubdag_B', default_args=default_args)
+                    subdag_b = DAG('parent.op_subdag_0.opSubdag_B', default_args=default_args)
                     DummyOperator(task_id='subdag_b.task', dag=subdag_b)
                     return subdag_b
 
                 def subdag_c():
-                    subdag_c = DAG('master.op_subdag_1.opSubdag_C', default_args=default_args)
+                    subdag_c = DAG('parent.op_subdag_1.opSubdag_C', default_args=default_args)
                     DummyOperator(task_id='subdag_c.task', dag=subdag_c)
                     return subdag_c
 
                 def subdag_d():
-                    subdag_d = DAG('master.op_subdag_1.opSubdag_D', default_args=default_args)
+                    subdag_d = DAG('parent.op_subdag_1.opSubdag_D', default_args=default_args)
                     DummyOperator(task_id='subdag_d.task', dag=subdag_d)
                     return subdag_d
 
                 def subdag_0():
-                    subdag_0 = DAG('master.op_subdag_0', default_args=default_args)
+                    subdag_0 = DAG('parent.op_subdag_0', default_args=default_args)
                     SubDagOperator(task_id='opSubdag_A', dag=subdag_0, subdag=subdag_a())
                     SubDagOperator(task_id='opSubdag_B', dag=subdag_0, subdag=subdag_b())
                     return subdag_0
 
                 def subdag_1():
-                    subdag_1 = DAG('master.op_subdag_1', default_args=default_args)
+                    subdag_1 = DAG('parent.op_subdag_1', default_args=default_args)
                     SubDagOperator(task_id='opSubdag_C', dag=subdag_1, subdag=subdag_c())
                     SubDagOperator(task_id='opSubdag_D', dag=subdag_1, subdag=subdag_d())
                     return subdag_1
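
(The test changes above only rename the example DAG from "master" to "parent";
the subdag IDs change in lockstep because Airflow requires a subdag's dag_id
to be prefixed with "<parent_dag_id>.".)

As the commit message notes, plenty of "master" references remain in the repo.
A quick way to enumerate what is left in a checkout (shown for context only;
not part of this commit):

    # List remaining "master" references, with line numbers, skipping binaries
    git grep -nI master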