Posted to commits@airflow.apache.org by GitBox <gi...@apache.org> on 2022/03/19 12:25:38 UTC

[GitHub] [airflow] potiuk commented on a change in pull request #21956: prod image build changes

potiuk commented on a change in pull request #21956:
URL: https://github.com/apache/airflow/pull/21956#discussion_r830478622



##########
File path: dev/breeze/src/airflow_breeze/prod/build_prod_image.py
##########
@@ -0,0 +1,196 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import contextlib
+import sys
+from typing import Dict, List
+
+from airflow_breeze.cache import check_cache_and_write_if_not_cached, write_to_cache_file
+from airflow_breeze.console import console
+from airflow_breeze.prod.prod_params import ProdParams
+from airflow_breeze.utils.path_utils import AIRFLOW_SOURCE, DOCKER_CONTEXT_DIR
+from airflow_breeze.utils.run_utils import filter_out_none, run_command
+
+PARAMS_PROD_IMAGE = [
+    "python_base_image",
+    "install_mysql_client",
+    "install_mssql_client",
+    "install_postgres_client",
+    "airflow_version",
+    "airflow_branch",
+    "airflow_extras",
+    "airflow_pre_cached_pip_packages",
+    "additional_airflow_extras",
+    "additional_python_deps",
+    "additional_dev_apt_command",
+    "additional_dev_apt_deps",
+    "additional_dev_apt_env",
+    "additional_runtime_apt_command",
+    "additional_runtime_apt_deps",
+    "additional_runtime_apt_env",
+    "upgrade_to_newer_dependencies",
+    "constraints_github_repository",
+    "airflow_constraints",
+    "airflow_image_repository",
+    "airflow_image_date_created",
+    "build_id",
+    "commit_sha",
+    "airflow_image_readme_url",
+    "install_providers_from_sources",
+    "install_from_pypi",
+    "install_from_docker_context_files",
+]
+
+PARAMS_TO_VERIFY_PROD_IMAGE = [
+    "dev_apt_command",
+    "dev_apt_deps",
+    "runtime_apt_command",
+    "runtime_apt_deps",
+]
+
+
+def construct_arguments_docker_command(prod_image: ProdParams) -> List[str]:
+    args_command = []
+    for param in PARAMS_PROD_IMAGE:
+        args_command.append("--build-arg")
+        args_command.append(param.upper() + "=" + str(getattr(prod_image, param)))
+    for verify_param in PARAMS_TO_VERIFY_PROD_IMAGE:
+        param_value = str(getattr(prod_image, verify_param))
+        if len(param_value) > 0:
+            args_command.append("--build-arg")
+            args_command.append(verify_param.upper() + "=" + param_value)
+    docker_cache = prod_image.docker_cache_prod_directive
+    if len(docker_cache) > 0:
+        args_command.extend(docker_cache)
+    return args_command
+
+
+def construct_docker_command(prod_image: ProdParams) -> List[str]:
+    arguments = construct_arguments_docker_command(prod_image)
+    build_command = prod_image.check_buildx_plugin_build_command()
+    build_flags = prod_image.extra_docker_build_flags
+    final_command = []
+    final_command.extend(["docker"])
+    final_command.extend(build_command)
+    final_command.extend(build_flags)
+    final_command.extend(["--pull"])
+    final_command.extend(arguments)
+    final_command.extend(["-t", prod_image.airflow_prod_image_name, "--target", "main", "."])
+    final_command.extend(["-f", 'Dockerfile'])
+    final_command.extend(["--platform", prod_image.platform])
+    return final_command
+
+
+def login_to_docker_registry(build_params: ProdParams):
+    if build_params.ci == "true":
+        if len(build_params.github_token) == 0:
+            console.print("\n[blue]Skip logging in to GitHub Registry. No Token available!")
+        elif build_params.airflow_login_to_github_registry != "true":
+            console.print(
+                "\n[blue]Skip logging in to GitHub Registry. "
+                "AIRFLOW_LOGIN_TO_GITHUB_REGISTRY is set as false"
+            )
+        else:
+            run_command(['docker', 'logout', 'ghcr.io'], verbose=True, text=True)
+            run_command(
+                [
+                    'docker',
+                    'login',
+                    '--username',
+                    build_params.github_username,
+                    '--password-stdin',
+                    'ghcr.io',
+                ],
+                verbose=True,
+                text=True,
+                input=build_params.github_token,
+            )
+
+
+def clean_docker_context_files():
+    extensions_to_delete = ['whl', 'tar.gz']

Review comment:
       I think we should just skip README.md. There might be other files in `docker-context-files` that could pollute the cache.
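
       For illustration only, a minimal sketch of that approach (keeping `README.md` and deleting everything else). It assumes `DOCKER_CONTEXT_DIR` from the imports quoted above is a `pathlib.Path` pointing at `docker-context-files`; the exact shape is of course up to the PR:

       ```python
       from airflow_breeze.utils.path_utils import DOCKER_CONTEXT_DIR


       def clean_docker_context_files():
           # Sketch: delete everything under docker-context-files except README.md,
           # instead of matching a fixed list of extensions, so no stray file can
           # pollute the cache.
           for file in DOCKER_CONTEXT_DIR.glob('**/*'):
               if file.is_file() and file.name != 'README.md':
                   file.unlink()
       ```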




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscribe@airflow.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org