Posted to commits@tvm.apache.org by ar...@apache.org on 2022/05/18 00:00:00 UTC

[tvm] 01/36: making some progress

This is an automated email from the ASF dual-hosted git repository.

areusch pushed a commit to branch areusch/freeze-dependencies
in repository https://gitbox.apache.org/repos/asf/tvm.git

commit 6aa0041f5cbcebafb1b4f4f6ec6397ff5c112232
Author: Andrew Reusch <ar...@gmail.com>
AuthorDate: Fri Apr 22 17:06:45 2022 -0700

    making some progress
---
 pyproject.toml => docker/Dockerfile.base_arm64     |  45 ++--
 .../{Dockerfile.ci_i386 => Dockerfile.base_i386}   |  39 +---
 pyproject.toml => docker/Dockerfile.base_x86_64    |  38 +---
 docker/Dockerfile.ci_cpu                           |  18 +-
 docker/Dockerfile.ci_i386                          |   7 -
 docker/build-base-images.sh                        |  30 +++
 docker/build.sh                                    |  36 ++-
 docker/ci-constraints.txt                          |  14 ++
 docker/freeze_deps.py                              | 252 +++++++++++++++++++++
 docker/install/ubuntu1804_install_python.sh        |  39 +++-
 docker/install/ubuntu_install_core.sh              |   2 +-
 docker/install/ubuntu_install_python_package.sh    |  27 +--
 docker/python/bootstrap-requirements-i386.txt      |  41 ++++
 docker/python/bootstrap-requirements-x86_64.txt    |  41 ++++
 pyproject.toml                                     |   6 +
 15 files changed, 478 insertions(+), 157 deletions(-)

diff --git a/pyproject.toml b/docker/Dockerfile.base_arm64
similarity index 63%
copy from pyproject.toml
copy to docker/Dockerfile.base_arm64
index 5cca711ddb..31a43c2edc 100644
--- a/pyproject.toml
+++ b/docker/Dockerfile.base_arm64
@@ -15,34 +15,19 @@
 # specific language governing permissions and limitations
 # under the License.
 
-[tool.black]
-line-length = 100
-target-version = ['py36']
-include = '(\.pyi?$)'
-exclude = '''
+# CI docker arm env
+# tag: v0.02
 
-(
-  /(
-      \.github
-    | \.tvm
-    | \.tvm_test_data
-    | \.vscode
-    | \.venv
-    | 3rdparty
-    | build\/
-    | cmake\/
-    | conda\/
-    | docker\/
-    | docs\/
-    | golang\/
-    | include\/
-    | jvm\/
-    | licenses\/
-    | nnvm\/
-    | rust\/
-    | src\/
-    | vta\/
-    | web\/
-  )/
-)
-'''
+FROM ubuntu:18.04
+
+RUN apt-get update --fix-missing
+RUN apt-get install -y ca-certificates gnupg2
+
+COPY install/ubuntu_install_core.sh /install/ubuntu_install_core.sh
+RUN bash /install/ubuntu_install_core.sh
+
+COPY install/ubuntu1804_install_python.sh /install/ubuntu1804_install_python.sh
+RUN bash /install/ubuntu1804_install_python.sh
+
+# Globally disable pip cache
+RUN pip config set global.no-cache-dir false
diff --git a/docker/Dockerfile.ci_i386 b/docker/Dockerfile.base_i386
similarity index 53%
copy from docker/Dockerfile.ci_i386
copy to docker/Dockerfile.base_i386
index 61ba064ff3..ae58d6dcac 100644
--- a/docker/Dockerfile.ci_i386
+++ b/docker/Dockerfile.base_i386
@@ -25,44 +25,13 @@ RUN apt-get update --fix-missing && apt-get install -y ca-certificates
 COPY install/ubuntu_install_core.sh /install/ubuntu_install_core.sh
 RUN bash /install/ubuntu_install_core.sh
 
-COPY install/ubuntu_install_googletest.sh /install/ubuntu_install_googletest.sh
-RUN bash /install/ubuntu_install_googletest.sh
-
-COPY install/ubuntu_install_llvm.sh /install/ubuntu_install_llvm.sh
-RUN bash /install/ubuntu_install_llvm.sh
-
-COPY install/ubuntu1804_install_python.sh /install/ubuntu1804_install_python.sh
-RUN bash /install/ubuntu1804_install_python.sh
-
-# Rust env (build early; takes a while)
+# Rust env (build early; takes a while). Needed for some python packages (e.g. cryptography).
 COPY install/ubuntu_install_rust.sh /install/ubuntu_install_rust.sh
 RUN bash /install/ubuntu_install_rust.sh
 ENV RUSTUP_HOME /opt/rust
 ENV CARGO_HOME /opt/rust
 ENV PATH $PATH:$CARGO_HOME/bin
 
-# Globally disable pip cache
-RUN pip config set global.no-cache-dir false
-
-COPY install/ubuntu_install_cmake_source.sh /install/ubuntu_install_cmake_source.sh
-RUN bash /install/ubuntu_install_cmake_source.sh
-
-COPY install/ubuntu_install_python_package.sh /install/ubuntu_install_python_package.sh
-RUN bash /install/ubuntu_install_python_package.sh
-
-# AutoTVM deps
-COPY install/ubuntu_install_redis.sh /install/ubuntu_install_redis.sh
-RUN bash /install/ubuntu_install_redis.sh
-
-# Chisel deps for TSIM
-COPY install/ubuntu_install_sbt.sh /install/ubuntu_install_sbt.sh
-RUN bash /install/ubuntu_install_sbt.sh
-
-# Verilator deps
-COPY install/ubuntu_install_verilator.sh /install/ubuntu_install_verilator.sh
-RUN bash /install/ubuntu_install_verilator.sh
-
-# sccache
-COPY install/ubuntu_install_sccache.sh /install/ubuntu_install_sccache.sh
-RUN bash /install/ubuntu_install_sccache.sh
-ENV PATH /opt/sccache:$PATH
+COPY python/bootstrap-requirements-i386.txt /install/python/bootstrap-requirements.txt
+COPY install/ubuntu1804_install_python.sh /install/ubuntu1804_install_python.sh
+RUN bash /install/ubuntu1804_install_python.sh
diff --git a/pyproject.toml b/docker/Dockerfile.base_x86_64
similarity index 66%
copy from pyproject.toml
copy to docker/Dockerfile.base_x86_64
index 5cca711ddb..eb2c84454c 100644
--- a/pyproject.toml
+++ b/docker/Dockerfile.base_x86_64
@@ -15,34 +15,12 @@
 # specific language governing permissions and limitations
 # under the License.
 
-[tool.black]
-line-length = 100
-target-version = ['py36']
-include = '(\.pyi?$)'
-exclude = '''
+# CI docker x86_64 base env
+FROM ubuntu:18.04
 
-(
-  /(
-      \.github
-    | \.tvm
-    | \.tvm_test_data
-    | \.vscode
-    | \.venv
-    | 3rdparty
-    | build\/
-    | cmake\/
-    | conda\/
-    | docker\/
-    | docs\/
-    | golang\/
-    | include\/
-    | jvm\/
-    | licenses\/
-    | nnvm\/
-    | rust\/
-    | src\/
-    | vta\/
-    | web\/
-  )/
-)
-'''
+COPY python/bootstrap-requirements-x86_64.txt /install/python/bootstrap-requirements.txt
+COPY install/ubuntu1804_install_python.sh /install/ubuntu1804_install_python.sh
+RUN bash /install/ubuntu1804_install_python.sh
+
+COPY install/ubuntu_install_core.sh /install/ubuntu_install_core.sh
+RUN bash /install/ubuntu_install_core.sh
diff --git a/docker/Dockerfile.ci_cpu b/docker/Dockerfile.ci_cpu
index 45943334a0..40a6eedab7 100644
--- a/docker/Dockerfile.ci_cpu
+++ b/docker/Dockerfile.ci_cpu
@@ -16,22 +16,10 @@
 # under the License.
 
 # CI docker CPU env
-FROM ubuntu:18.04
-
-RUN apt-get update --fix-missing
-
-COPY install/ubuntu_install_core.sh /install/ubuntu_install_core.sh
-RUN bash /install/ubuntu_install_core.sh
-
-COPY install/ubuntu_install_googletest.sh /install/ubuntu_install_googletest.sh
-RUN bash /install/ubuntu_install_googletest.sh
-
-COPY install/ubuntu1804_install_python.sh /install/ubuntu1804_install_python.sh
-RUN bash /install/ubuntu1804_install_python.sh
-
-# Globally disable pip cache
-RUN pip config set global.no-cache-dir false
+FROM tvm_ci.base_x86_64:latest
 
+COPY install/build/base_x86_64/pyproject.toml /install/python/pyproject.toml
+COPY install/build/base_x86_64/poetry.lock /install/python/poetry.lock
 COPY install/ubuntu_install_python_package.sh /install/ubuntu_install_python_package.sh
 RUN bash /install/ubuntu_install_python_package.sh
 
diff --git a/docker/Dockerfile.ci_i386 b/docker/Dockerfile.ci_i386
index 61ba064ff3..bbf449d9c3 100644
--- a/docker/Dockerfile.ci_i386
+++ b/docker/Dockerfile.ci_i386
@@ -34,13 +34,6 @@ RUN bash /install/ubuntu_install_llvm.sh
 COPY install/ubuntu1804_install_python.sh /install/ubuntu1804_install_python.sh
 RUN bash /install/ubuntu1804_install_python.sh
 
-# Rust env (build early; takes a while)
-COPY install/ubuntu_install_rust.sh /install/ubuntu_install_rust.sh
-RUN bash /install/ubuntu_install_rust.sh
-ENV RUSTUP_HOME /opt/rust
-ENV CARGO_HOME /opt/rust
-ENV PATH $PATH:$CARGO_HOME/bin
-
 # Globally disable pip cache
 RUN pip config set global.no-cache-dir false
 
diff --git a/docker/build-base-images.sh b/docker/build-base-images.sh
new file mode 100755
index 0000000000..79c76dca58
--- /dev/null
+++ b/docker/build-base-images.sh
@@ -0,0 +1,30 @@
+#!/bin/bash -eux
+
+# Build the base images (one per CPU architecture) used to build the remaining TVM docker images.
+set -eux
+
+IMAGES=( )
+while [ "${1+x}" == "x" ]; do
+    IMAGES=( "${IMAGES[@]}" "$(dirname $0)/Dockerfile.base_$1" )
+    shift
+done
+
+if [ "${#IMAGES}" -eq 0 ]; then
+    IMAGES=$(ls -1 $(dirname $0)/Dockerfile.base_*)
+fi
+
+for docker_file in "${IMAGES[@]}"; do
+    git check-ignore "${docker_file}" && continue || /bin/true
+    arch=${docker_file#"$(dirname $0)/Dockerfile.base_"}
+    echo "Building base image for architecture ${arch}"
+    $(dirname $0)/build.sh "base_${arch}" --platform "${arch}"
+
+    # NOTE: working dir inside docker is repo root.
+    $(dirname $0)/bash.sh -it "tvm.base_${arch}:latest" python3 docker/freeze_deps.py \
+                 --ci-constraints=docker/ci-constraints.txt \
+                 --gen-requirements-py=python/gen_requirements.py \
+                 --template-pyproject-toml=pyproject.toml \
+                 --output-pyproject-toml=docker/build/base_${arch}/pyproject.toml \
+                 --output-poetry-lock=docker/build/base_${arch}/poetry.lock
+
+done
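
The loop above drives the rest of this change: it derives the architecture from each Dockerfile.base_<arch> filename, builds the matching base image, and then runs freeze_deps.py inside that image to produce per-arch pyproject.toml/poetry.lock files. A rough Python equivalent, purely as a sketch (it assumes it is run from the repo root and that the tvm.base_<arch>:latest tag matches what build.sh produces):

    import pathlib
    import subprocess

    docker_dir = pathlib.Path("docker")  # assumes the current directory is the repo root

    for dockerfile in sorted(docker_dir.glob("Dockerfile.base_*")):
        arch = dockerfile.name[len("Dockerfile.base_"):]
        print(f"Building base image for architecture {arch}")
        subprocess.check_call([str(docker_dir / "build.sh"), f"base_{arch}", "--platform", arch])
        # freeze_deps.py runs inside the freshly built base image; its working dir is the repo root.
        subprocess.check_call([
            str(docker_dir / "bash.sh"), "-it", f"tvm.base_{arch}:latest",
            "python3", "docker/freeze_deps.py",
            "--ci-constraints=docker/ci-constraints.txt",
            "--gen-requirements-py=python/gen_requirements.py",
            "--template-pyproject-toml=pyproject.toml",
            f"--output-pyproject-toml=docker/build/base_{arch}/pyproject.toml",
            f"--output-poetry-lock=docker/build/base_{arch}/poetry.lock",
        ])
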
diff --git a/docker/build.sh b/docker/build.sh
index ed67b638c7..6f9c352600 100755
--- a/docker/build.sh
+++ b/docker/build.sh
@@ -24,7 +24,7 @@
 #                [--dockerfile <DOCKERFILE_PATH>] [-it]
 #                [--net=host] [--cache-from <IMAGE_NAME>]
 #                [--name CONTAINER_NAME] [--context-path <CONTEXT_PATH>]
-#                [--spec DOCKER_IMAGE_SPEC]
+#                [--spec DOCKER_IMAGE_SPEC] [--platform <PLATFORM>]
 #                [<COMMAND>]
 #
 # CONTAINER_TYPE: Type of the docker container used the run the build,
@@ -43,6 +43,8 @@
 # IMAGE_NAME: An image to be as a source for cached layers when building the
 #             Docker image requested.
 #
+# PLATFORM: Docker platform suitable to be passed to docker buildx build --platform.
+#
 # CONTAINER_NAME: The name of the docker container, and the hostname that will
 #                 appear inside the container.
 #
@@ -88,7 +90,15 @@ if [[ "$1" == "--net=host" ]]; then
     shift 1
 fi
 
-DOCKER_NO_CACHE_ARG=--no-cache
+PLATFORM=
+if [[ "$1" == "--platform" ]]; then
+    shift
+    PLATFORM="$1"
+    shift
+fi
+
+DOCKER_NO_CACHE_ARG=
+#--no-cache
 
 if [[ "$1" == "--cache-from" ]]; then
     shift 1
@@ -180,6 +190,7 @@ echo "WORKSPACE: ${WORKSPACE}"
 echo "CI_DOCKER_EXTRA_PARAMS: ${CI_DOCKER_EXTRA_PARAMS[@]}"
 echo "COMMAND: ${COMMAND[@]}"
 echo "CONTAINER_TYPE: ${CONTAINER_TYPE}"
+echo "PLATFORM: ${PLATFORM}"
 echo "BUILD_TAG: ${BUILD_TAG}"
 echo "DOCKER CONTAINER NAME: ${DOCKER_IMG_NAME}"
 echo "DOCKER_IMAGE_TAG: ${DOCKER_IMAGE_TAG}"
@@ -188,12 +199,23 @@ echo ""
 
 
 # Build the docker container.
+cmd=( docker )
+if [ -n "${PLATFORM}" ]; then
+    cmd=( "${cmd[@]}" buildx build --platform "${PLATFORM}" )
+else
+    cmd=( "${cmd[@]}" build )
+fi
+cmd=( "${cmd[@]}" \
+          -t "${DOCKER_IMG_SPEC}" \
+          "${DOCKER_NO_CACHE_ARG}" \
+          -f "${DOCKERFILE_PATH}" \
+          "${CI_DOCKER_BUILD_EXTRA_PARAMS[@]}" \
+          "${DOCKER_CONTEXT_PATH}" \
+    )
+
 echo "Building container (${DOCKER_IMG_NAME})..."
-docker build -t ${DOCKER_IMG_SPEC} \
-    ${DOCKER_NO_CACHE_ARG} \
-    -f "${DOCKERFILE_PATH}" \
-    ${CI_DOCKER_BUILD_EXTRA_PARAMS[@]} \
-    "${DOCKER_CONTEXT_PATH}"
+echo "${cmd[@]}"
+${cmd[@]}
 
 # Check docker build status
 if [[ $? != "0" ]]; then
diff --git a/docker/ci-constraints.txt b/docker/ci-constraints.txt
new file mode 100644
index 0000000000..efe4e1c473
--- /dev/null
+++ b/docker/ci-constraints.txt
@@ -0,0 +1,14 @@
+# This file lists packages we intentionally hold back in CI. Updates outside of these bounds
+# require a considerable amount of work, and letting these packages float freely would mean that
+# small changes to the TVM dependency set could be blocked behind large migration tasks whenever
+# a new version of one of them is released. Holding them back here lets us decide when to take on
+# that migration work.
+#keras = "^2.6.0"
+#mxnet = "^1.6.0"
+
+onnx = "^1.10.0"
+numpy = "==1.19.3"
+paddlepaddle = "==2.1.3"
+sphinx = "==4.2.0"
+#sphinx-gallery = "==0.4.0"
+tensorflow = "^2.6"
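
Each non-comment line above must match the CI_CONSTRAINTS_RE pattern defined in docker/freeze_deps.py below. A short illustration of the accepted format, reusing that same pattern:

    import re

    # Same pattern as CI_CONSTRAINTS_RE in docker/freeze_deps.py.
    CI_CONSTRAINTS_RE = re.compile(r'(?P<package_name>[a-zA-Z0-9_-]+) = "(?P<version>[^"]+)".*')

    for line in ['onnx = "^1.10.0"', 'numpy = "==1.19.3"', '#keras = "^2.6.0"']:
        m = CI_CONSTRAINTS_RE.match(line)
        if m:
            print(m.group("package_name"), "->", m.group("version"))
        else:
            print("skipped (comment):", line)
    # onnx -> ^1.10.0
    # numpy -> ==1.19.3
    # skipped (comment): #keras = "^2.6.0"
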
diff --git a/docker/freeze_deps.py b/docker/freeze_deps.py
new file mode 100644
index 0000000000..99fb492189
--- /dev/null
+++ b/docker/freeze_deps.py
@@ -0,0 +1,252 @@
+#!/usr/bin/env python3
+import argparse
+import importlib
+import pathlib
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import typing
+
+
+SECTION_RE = re.compile(r'\[([^]]+)\].*')
+
+
+def remove_sections(lines : typing.List[str], section_names : typing.List[str]) -> typing.List[str]:
+  """Remove .toml sections from a list of lines.
+
+  Parameters
+  ----------
+  lines : list[str]
+      A list containing the lines of the toml file.
+  section_names : list[str]
+      A list of names of sections which should be removed.
+
+  Returns
+  -------
+  (removed, insert_points):
+      A 2-tuple. `removed` is a new list of strings with those sections removed. `insert_points` is
+      a dict containing an entry for each section removed; key is the section name and value is the
+      index into `removed` where that section would have been.
+  """
+  removed = []
+  insert_points = {}
+  drop_line = False
+  for line in lines:
+    m = SECTION_RE.match(line)
+    if m:
+      drop_line = m.group(1) in section_names
+      insert_points[m.group(1)] = len(removed)
+
+    if not drop_line:
+      removed.append(line)
+
+  return removed, insert_points
+
+
+def write_dependencies(requirements_by_piece : dict, constraints : dict, output_f):
+  """Write the [tool.poetry.dependencies] section of pyproject.toml.
+
+  Parameters
+  ----------
+  requirements_by_piece : dict
+      The REQUIREMENTS_BY_PIECE dict from gen_requirements.py module.
+  constraints : dict
+      The CONSTRAINTS dict from gen_requirements.py module, updated with additional constraints from
+      ci-constraints.txt.
+  output_f : File
+      A file-like object where the section should be written.
+  """
+  output_f.write("[tool.poetry.dependencies]\n"
+                 'python = ">=3.7, <3.9"\n')
+  core_packages = set(requirements_by_piece["core"][1])
+  dev_packages = set(requirements_by_piece["dev"][1])
+
+  for package, constraint in constraints.items():
+    if package in dev_packages:
+      continue
+
+    optional = package not in core_packages
+    output_f.write(
+        f"{package} = {{ version = \"{constraint or '*'}\", optional = {str(optional).lower()} }}\n")
+
+  output_f.write("\n")
+
+
+def write_dev_dependencies(requirements_by_piece : dict, constraints : dict, output_f):
+  """Write the [tool.poetry.dev-dependencies] section of pyproject.toml.
+
+  Parameters
+  ----------
+  requirements_by_piece : dict
+      The REQUIREMENTS_BY_PIECE dict from gen_requirements.py module.
+  constraints : dict
+      The CONSTRAINTS dict from gen_requirements.py module, updated with additional constraints from
+      ci-constraints.txt.
+  output_f : File
+      A file-like object where the section should be written.
+  """
+  output_f.write("[tool.poetry.dev-dependencies]\n")
+  dev_packages = set(requirements_by_piece["dev"][1])
+
+  for package, constraint in constraints.items():
+    if package not in dev_packages:
+      continue
+
+    output_f.write(f"{package} = \"{constraint or '*'}\"\n")
+
+  output_f.write("\n")
+
+
+def write_extras(requirements_by_piece : dict, constraints : dict, output_f):
+  """Write the [tool.poetry.extras] section of pyproject.toml.
+
+  Parameters
+  ----------
+  requirements_by_piece : dict
+      The REQUIREMENTS_BY_PIECE dict from gen_requirements.py module.
+  constraints : dict
+      The CONSTRAINTS dict from gen_requirements.py module, updated with additional constraints from
+      ci-constraints.txt.
+  output_f : File
+      A file-like object where the section should be written.
+  """
+  output_f.write("[tool.poetry.extras]\n")
+
+  for piece, (description, packages) in requirements_by_piece.items():
+    if piece in ("core", "dev"):
+      # These pieces do not need an extras declaration.
+      continue
+
+    output_f.write(f"# {description}\n")
+    package_list = ", ".join(f'"{p}"' for p in sorted(packages))
+    output_f.write(f"{piece} = [{package_list}]\n\n")
+
+  output_f.write("\n")
+
+
+# List of all the emitted sections in order they are to be emitted.
+SECTION_ORDER = ("tool.poetry.dependencies", "tool.poetry.dev-dependencies", "tool.poetry.extras")
+
+
+CI_CONSTRAINTS_RE = re.compile(r'(?P<package_name>[a-zA-Z0-9_-]+) = "(?P<version>[^"]+)".*')
+
+
+def generate_pyproject_toml(ci_constraints_txt : pathlib.Path, gen_requirements_py : pathlib.Path,
+                            template_pyproject_toml : pathlib.Path,
+                            output_pyproject_toml : pathlib.Path):
+  """Generate poetry dependencies sections in pyproject.toml from gen_requirements.py.
+
+  Existing [tool.poetry.dev-dependencies], [tool.poetry.dependencies], and [tool.poetry.extras]
+  sections are overwritten.
+
+  Parameters
+  ----------
+  ci_constraints_txt : pathlib.Path
+      Path to ci-constraints.txt.
+  gen_requirements_py : pathlib.Path
+      Path to the python/gen_requirements.py file in TVM.
+  template_pyproject_toml : pathlib.Path
+      Path to a pyproject.toml whose [{dev-,}dependencies] sections should be replaced with those from
+      gen_requirements.py. In production, this is expected to be the checked-in pyproject.toml at
+      the root of the TVM repo.
+  output_pyproject_toml : pathlib.Path
+      Non-existent path to the revised pyproject.toml.
+  """
+  with open(template_pyproject_toml) as template_f:
+    pyproject_toml, insert_points = remove_sections(template_f, SECTION_ORDER)
+
+  insert_points = {s: insert_points.get(s, len(pyproject_toml)) for s in SECTION_ORDER}
+
+  sys.path.insert(0, str(gen_requirements_py.resolve().parent))
+  gen_requirements = importlib.import_module(gen_requirements_py.stem)
+  sys.path.pop(0)
+
+  constraints = dict(gen_requirements.CONSTRAINTS)
+  with open(ci_constraints_txt) as ci_constraints_f:
+    for i, line in enumerate(ci_constraints_f):
+      if not line.strip():
+        continue
+
+      m = CI_CONSTRAINTS_RE.match(line)
+      if not m:
+        if line.startswith("#"):
+          continue
+        print(f"{ci_constraints_txt}: {i}: Malformed line {line}")
+        sys.exit(2)
+
+      package_name = m.group("package_name")
+      if package_name not in constraints:
+        print(f"{ci_constraints_txt}: {i}: Package {package_name} not listed in gen_requirements.py")
+        sys.exit(2)
+
+      if constraints.get(package_name):
+        print(f"{ci_constraints_txt}: {i}: Package {package_name} already functionally constrained in gen_requirements.py")
+        sys.exit(2)
+
+      constraints[package_name] = m.group("version")
+
+  stop_points = list(sorted([(v, k) for k, v in insert_points.items()], key=lambda x: (x[0], SECTION_ORDER.index(x[1]))))
+  next_stop = stop_points.pop(0)
+  with open(output_pyproject_toml, "w") as output_f:
+    def _write(next_stop, i):
+      while next_stop[0] == i:
+        writer_function_name = f"write_{next_stop[1][len('tool.poetry.'):].replace('-', '_')}"
+        globals()[writer_function_name](dict(gen_requirements.REQUIREMENTS_BY_PIECE), constraints, output_f)
+        next_stop = stop_points.pop(0) if stop_points else (None, "")
+
+      return next_stop
+
+    for i, line in enumerate(pyproject_toml):
+      next_stop = _write(next_stop, i)
+      output_f.write(line)
+
+    next_stop = _write(next_stop, len(pyproject_toml))
+    assert next_stop[0] is None, f"Did not write all sections. Remaining: {next_stop}"
+
+
+def freeze_deps(output_pyproject_toml):
+  subprocess.check_call(["poetry", "lock", "-v"], cwd=output_pyproject_toml.parent)
+
+
+REPO_ROOT = pathlib.Path(__file__).parent.parent
+
+
+def parse_args(argv : typing.List[str]) -> argparse.Namespace:
+  parser = argparse.ArgumentParser(argv[0], usage="Create a pyproject.toml containing the information in python/gen_requirements.py")
+  parser.add_argument("--ci-constraints",
+                      type=pathlib.Path, default=REPO_ROOT / "docker/ci-constraints.txt",
+                      help=("Path to a file describing packages held back in "
+                            "CI to make routine package updates possible."))
+  parser.add_argument("--gen-requirements-py",
+                      type=pathlib.Path, default=REPO_ROOT / "python" / "gen_requirements.py",
+                      help="Path to python/gen_requirements.py in the TVM repo")
+  parser.add_argument("--template-pyproject-toml",
+                      type=pathlib.Path,
+                      help="Path to the pyproject.toml to use as a basis for the updated pyproject.toml.")
+  parser.add_argument("--output-pyproject-toml",
+                      type=pathlib.Path,
+                      help="Path where the updated pyproject.toml should be written.")
+  parser.add_argument("--output-poetry-lock",
+                      type=pathlib.Path,
+                      help="Path where the poetry.lock file should be written.")
+
+  return parser.parse_args(argv[1:])
+
+
+def main(argv : typing.List[str]):
+  args = parse_args(argv)
+
+  with tempfile.TemporaryDirectory() as temp_dir:
+      temp_pyproject_toml = pathlib.Path(temp_dir) / "pyproject.toml"
+      generate_pyproject_toml(args.ci_constraints, args.gen_requirements_py, args.template_pyproject_toml, temp_pyproject_toml)
+      freeze_deps(temp_pyproject_toml)
+      args.output_pyproject_toml.parent.mkdir(exist_ok=True, parents=True)
+      shutil.copyfile(temp_pyproject_toml, args.output_pyproject_toml)
+      args.output_poetry_lock.parent.mkdir(exist_ok=True, parents=True)
+      shutil.copyfile(pathlib.Path(temp_dir) / "poetry.lock", args.output_poetry_lock)
+
+
+if __name__ == "__main__":
+  main(sys.argv)
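
The trickiest part of freeze_deps.py is the splicing done by remove_sections together with generate_pyproject_toml: the three poetry sections are stripped from the template pyproject.toml, and the regenerated sections are written back at the indexes where the originals used to sit (or appended at the end if a section was missing). A small self-contained sketch of that idea, using a toy template rather than the real pyproject.toml:

    import re

    SECTION_RE = re.compile(r"\[([^]]+)\].*")

    def remove_sections(lines, section_names):
        # Drop the named sections, remembering the index where each section header was seen.
        removed, insert_points, drop = [], {}, False
        for line in lines:
            m = SECTION_RE.match(line)
            if m:
                drop = m.group(1) in section_names
                insert_points[m.group(1)] = len(removed)
            if not drop:
                removed.append(line)
        return removed, insert_points

    template = [
        "[tool.black]\n",
        "line-length = 100\n",
        "[tool.poetry.dependencies]\n",
        'numpy = "*"\n',
        "[tool.other]\n",
        "x = 1\n",
    ]
    kept, points = remove_sections(template, ["tool.poetry.dependencies"])
    print(points)  # {'tool.black': 0, 'tool.poetry.dependencies': 2, 'tool.other': 2}

    # Splice a regenerated dependencies section back in at the recorded index.
    i = points["tool.poetry.dependencies"]
    out = kept[:i] + ["[tool.poetry.dependencies]\n", 'numpy = "==1.19.3"\n'] + kept[i:]
    print("".join(out), end="")
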
diff --git a/docker/install/ubuntu1804_install_python.sh b/docker/install/ubuntu1804_install_python.sh
index 94d316199d..f5a40a03a5 100755
--- a/docker/install/ubuntu1804_install_python.sh
+++ b/docker/install/ubuntu1804_install_python.sh
@@ -19,6 +19,7 @@
 set -e
 set -u
 set -o pipefail
+set -x
 
 
 cleanup() {
@@ -35,12 +36,34 @@ apt-get install -y software-properties-common
 apt-get install -y python3.7 python3.7-dev python3-pip
 update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.7 1
 
-# Pin pip and setuptools versions
-# Hashes generated via:
-#   $ pip download <package>==<version>
-#   $ pip hash --algorithm sha512 <package>.whl
-cat <<EOF > base-requirements.txt
-pip==19.3.1 --hash=sha256:6917c65fc3769ecdc61405d3dfd97afdedd75808d200b2838d7d961cebc0c2c7
-setuptools==58.4.0 --hash=sha256:e8b1d3127a0441fb99a130bcc3c2bf256c2d3ead3aba8fd400e5cbbaf788e036
+function download_hash() {
+    cat >/tmp/hash-bootstrap-packages.py <<EOF
+import os
+import os.path
+import subprocess
+import pkginfo
+
+for f in sorted(os.scandir("."), key=lambda x: x.name):
+  if not f.is_file():
+    continue
+  p = pkginfo.get_metadata(f.name)
+  if not p:
+    continue
+  print(f"{p.name}=={p.version} {subprocess.check_output(['pip3', 'hash', '-a', 'sha256', p.filename], encoding='utf-8').split()[1]} # {f.name}")
 EOF
-pip3 install -r base-requirements.txt
+    mkdir packages && cd packages
+    pip3 install -U "$@"
+    pip3 download pip poetry setuptools
+    python3 /tmp/hash-bootstrap-packages.py
+    exit 2 # make docker build stop
+}
+
+# Install bootstrap packages. You can update these with the following procedure:
+# 1. Uncomment the download_hash line below, then attempt to rebuild the base images (the build will fail).
+# 2. New hashes should be printed in the terminal log from each docker build. Copy these hashes into
+#    the arch-appropriate docker/python/bootstrap-requirements-<arch>.txt file.
+# download_hash pip setuptools pkginfo
+
+pip3 install -U pip -c /install/python/bootstrap-requirements.txt  # Update pip to match the version pinned in bootstrap-requirements.txt
+pip3 config set global.no-cache-dir false
+pip3 install -r /install/python/bootstrap-requirements.txt -c /install/python/bootstrap-requirements.txt
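
Each line of the bootstrap-requirements files added below has the form 'name==version --hash=sha256:<digest> # <wheel filename>', so pip installs only artifacts whose hashes match. The download_hash helper above shells out to pip3 hash; the digest is simply the sha256 of the wheel file, as in this rough standalone sketch (the wheel path here is a placeholder, assumed to have been fetched beforehand with pip3 download):

    import hashlib
    import pathlib

    def requirement_line(wheel_path, name, version):
        # Produce a hash-pinned line like those in docker/python/bootstrap-requirements-*.txt.
        wheel = pathlib.Path(wheel_path)
        digest = hashlib.sha256(wheel.read_bytes()).hexdigest()
        return f"{name}=={version} --hash=sha256:{digest} # {wheel.name}"

    # Placeholder wheel name; e.g. fetched beforehand via: pip3 download pip==22.0.4
    print(requirement_line("pip-22.0.4-py3-none-any.whl", "pip", "22.0.4"))
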
diff --git a/docker/install/ubuntu_install_core.sh b/docker/install/ubuntu_install_core.sh
index 5593d61ea5..07de29b921 100755
--- a/docker/install/ubuntu_install_core.sh
+++ b/docker/install/ubuntu_install_core.sh
@@ -21,7 +21,7 @@ set -u
 set -o pipefail
 
 # install libraries for building c++ core on ubuntu
-apt-get update && apt-get install -y --no-install-recommends \
+apt-get update --fix-missing && apt-get install -y --no-install-recommends \
     apt-transport-https \
     ca-certificates \
     cmake \
diff --git a/docker/install/ubuntu_install_python_package.sh b/docker/install/ubuntu_install_python_package.sh
index 0353814efc..0b0c9895f5 100755
--- a/docker/install/ubuntu_install_python_package.sh
+++ b/docker/install/ubuntu_install_python_package.sh
@@ -20,27 +20,6 @@ set -e
 set -u
 set -o pipefail
 
-# install libraries for python package on ubuntu
-pip3 install --upgrade \
-    attrs \
-    cloudpickle \
-    cython \
-    decorator \
-    mypy \
-    numpy~=1.19.5 \
-    orderedset \
-    packaging \
-    Pillow==9.1.0 \
-    psutil \
-    pytest \
-    tlcpack-sphinx-addon==0.2.1 \
-    pytest-profiling \
-    pytest-xdist \
-    requests \
-    scipy \
-    Jinja2 \
-    synr==0.6.0 \
-    junitparser==2.4.2 \
-    six \
-    tornado \
-    pytest-lazy-fixture
+cd $(dirname $0)/python
+poetry config virtualenvs.create false
+poetry install --no-root
diff --git a/docker/python/bootstrap-requirements-i386.txt b/docker/python/bootstrap-requirements-i386.txt
new file mode 100644
index 0000000000..722a45704d
--- /dev/null
+++ b/docker/python/bootstrap-requirements-i386.txt
@@ -0,0 +1,41 @@
+CacheControl==0.12.11 --hash=sha256:2c75d6a8938cb1933c75c50184549ad42728a27e9f6b92fd677c3151aa72555b # CacheControl-0.12.11-py2.py3-none-any.whl
+SecretStorage==3.3.2 --hash=sha256:755dc845b6ad76dcbcbc07ea3da75ae54bb1ea529eb72d15f83d26499a5df319 # SecretStorage-3.3.2-py3-none-any.whl
+cachy==0.3.0 --hash=sha256:338ca09c8860e76b275aff52374330efedc4d5a5e45dc1c5b539c1ead0786fe7 # cachy-0.3.0-py2.py3-none-any.whl
+certifi==2021.10.8 --hash=sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569 # certifi-2021.10.8-py2.py3-none-any.whl
+cffi==1.15.0 --hash=sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4 # cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl
+charset-normalizer==2.0.12 --hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df # charset_normalizer-2.0.12-py3-none-any.whl
+cleo==0.8.1 --hash=sha256:141cda6dc94a92343be626bb87a0b6c86ae291dfc732a57bf04310d4b4201753 # cleo-0.8.1-py2.py3-none-any.whl
+clikit==0.6.2 --hash=sha256:71268e074e68082306e23d7369a7b99f824a0ef926e55ba2665e911f7208489e # clikit-0.6.2-py2.py3-none-any.whl
+crashtest==0.3.1 --hash=sha256:300f4b0825f57688b47b6d70c6a31de33512eb2fa1ac614f780939aa0cf91680 # crashtest-0.3.1-py3-none-any.whl
+cryptography==36.0.2 --hash=sha256:70f8f4f7bb2ac9f340655cbac89d68c527af5bb4387522a8413e841e3e6628c9 # cryptography-36.0.2.tar.gz
+distlib==0.3.4 --hash=sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b # distlib-0.3.4-py2.py3-none-any.whl
+filelock==3.6.0 --hash=sha256:f8314284bfffbdcfa0ff3d7992b023d4c628ced6feb957351d4c48d059f56bc0 # filelock-3.6.0-py3-none-any.whl
+html5lib==1.1 --hash=sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d # html5lib-1.1-py2.py3-none-any.whl
+idna==3.3 --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff # idna-3.3-py3-none-any.whl
+importlib-metadata==1.7.0 --hash=sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070 # importlib_metadata-1.7.0-py2.py3-none-any.whl
+jeepney==0.8.0 --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 # jeepney-0.8.0-py3-none-any.whl
+keyring==22.3.0 --hash=sha256:2bc8363ebdd63886126a012057a85c8cb6e143877afa02619ac7dbc9f38a207b # keyring-22.3.0-py3-none-any.whl
+lockfile==0.12.2 --hash=sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa # lockfile-0.12.2-py2.py3-none-any.whl
+msgpack==1.0.3 --hash=sha256:bf1e6bfed4860d72106f4e0a1ab519546982b45689937b40257cfd820650b920 # msgpack-1.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl
+packaging==20.9 --hash=sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a # packaging-20.9-py2.py3-none-any.whl
+pastel==0.2.1 --hash=sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364 # pastel-0.2.1-py2.py3-none-any.whl
+pexpect==4.8.0 --hash=sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937 # pexpect-4.8.0-py2.py3-none-any.whl
+pip==22.0.4 --hash=sha256:c6aca0f2f081363f689f041d90dab2a07a9a07fb840284db2218117a52da800b # pip-22.0.4-py3-none-any.whl
+pkginfo==1.8.2 --hash=sha256:c24c487c6a7f72c66e816ab1796b96ac6c3d14d49338293d2141664330b55ffc # pkginfo-1.8.2-py2.py3-none-any.whl
+platformdirs==2.5.2 --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 # platformdirs-2.5.2-py3-none-any.whl
+poetry==1.1.13 --hash=sha256:52deb0792a2e801967ba9c4cdb39b56fe68b0b5cd3f195b004bef603db9d51a7 # poetry-1.1.13-py2.py3-none-any.whl
+poetry-core==1.0.8 --hash=sha256:54b0fab6f7b313886e547a52f8bf52b8cf43e65b2633c65117f8755289061924 # poetry_core-1.0.8-py2.py3-none-any.whl
+ptyprocess==0.7.0 --hash=sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35 # ptyprocess-0.7.0-py2.py3-none-any.whl
+pycparser==2.21 --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 # pycparser-2.21-py2.py3-none-any.whl
+pylev==1.4.0 --hash=sha256:7b2e2aa7b00e05bb3f7650eb506fc89f474f70493271a35c242d9a92188ad3dd # pylev-1.4.0-py2.py3-none-any.whl
+pyparsing==3.0.8 --hash=sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06 # pyparsing-3.0.8-py3-none-any.whl
+requests==2.27.1 --hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d # requests-2.27.1-py2.py3-none-any.whl
+requests-toolbelt==0.9.1 --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f # requests_toolbelt-0.9.1-py2.py3-none-any.whl
+setuptools==62.1.0 --hash=sha256:26ead7d1f93efc0f8c804d9fafafbe4a44b179580a7105754b245155f9af05a8 # setuptools-62.1.0-py3-none-any.whl
+shellingham==1.4.0 --hash=sha256:536b67a0697f2e4af32ab176c00a50ac2899c5a05e0d8e2dadac8e58888283f9 # shellingham-1.4.0-py2.py3-none-any.whl
+six==1.16.0 --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # six-1.16.0-py2.py3-none-any.whl
+tomlkit==0.10.1 --hash=sha256:3eba517439dcb2f84cf39f4f85fd2c3398309823a3c75ac3e73003638daf7915 # tomlkit-0.10.1-py3-none-any.whl
+urllib3==1.26.9 --hash=sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14 # urllib3-1.26.9-py2.py3-none-any.whl
+virtualenv==20.14.1 --hash=sha256:e617f16e25b42eb4f6e74096b9c9e37713cf10bf30168fb4a739f3fa8f898a3a # virtualenv-20.14.1-py2.py3-none-any.whl
+webencodings==0.5.1 --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 # webencodings-0.5.1-py2.py3-none-any.whl
+zipp==3.8.0 --hash=sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099 # zipp-3.8.0-py3-none-any.whl
diff --git a/docker/python/bootstrap-requirements-x86_64.txt b/docker/python/bootstrap-requirements-x86_64.txt
new file mode 100644
index 0000000000..ddf1ea2571
--- /dev/null
+++ b/docker/python/bootstrap-requirements-x86_64.txt
@@ -0,0 +1,41 @@
+CacheControl==0.12.11 --hash=sha256:2c75d6a8938cb1933c75c50184549ad42728a27e9f6b92fd677c3151aa72555b # CacheControl-0.12.11-py2.py3-none-any.whl
+SecretStorage==3.3.2 --hash=sha256:755dc845b6ad76dcbcbc07ea3da75ae54bb1ea529eb72d15f83d26499a5df319 # SecretStorage-3.3.2-py3-none-any.whl
+cachy==0.3.0 --hash=sha256:338ca09c8860e76b275aff52374330efedc4d5a5e45dc1c5b539c1ead0786fe7 # cachy-0.3.0-py2.py3-none-any.whl
+certifi==2021.10.8 --hash=sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569 # certifi-2021.10.8-py2.py3-none-any.whl
+cffi==1.15.0 --hash=sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997 # cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl
+charset-normalizer==2.0.12 --hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df # charset_normalizer-2.0.12-py3-none-any.whl
+cleo==0.8.1 --hash=sha256:141cda6dc94a92343be626bb87a0b6c86ae291dfc732a57bf04310d4b4201753 # cleo-0.8.1-py2.py3-none-any.whl
+clikit==0.6.2 --hash=sha256:71268e074e68082306e23d7369a7b99f824a0ef926e55ba2665e911f7208489e # clikit-0.6.2-py2.py3-none-any.whl
+crashtest==0.3.1 --hash=sha256:300f4b0825f57688b47b6d70c6a31de33512eb2fa1ac614f780939aa0cf91680 # crashtest-0.3.1-py3-none-any.whl
+cryptography==36.0.2 --hash=sha256:c2c5250ff0d36fd58550252f54915776940e4e866f38f3a7866d92b32a654b86 # cryptography-36.0.2-cp36-abi3-manylinux_2_24_x86_64.whl
+distlib==0.3.4 --hash=sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b # distlib-0.3.4-py2.py3-none-any.whl
+filelock==3.6.0 --hash=sha256:f8314284bfffbdcfa0ff3d7992b023d4c628ced6feb957351d4c48d059f56bc0 # filelock-3.6.0-py3-none-any.whl
+html5lib==1.1 --hash=sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d # html5lib-1.1-py2.py3-none-any.whl
+idna==3.3 --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff # idna-3.3-py3-none-any.whl
+importlib-metadata==1.7.0 --hash=sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070 # importlib_metadata-1.7.0-py2.py3-none-any.whl
+jeepney==0.8.0 --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 # jeepney-0.8.0-py3-none-any.whl
+keyring==22.3.0 --hash=sha256:2bc8363ebdd63886126a012057a85c8cb6e143877afa02619ac7dbc9f38a207b # keyring-22.3.0-py3-none-any.whl
+lockfile==0.12.2 --hash=sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa # lockfile-0.12.2-py2.py3-none-any.whl
+msgpack==1.0.3 --hash=sha256:9c0903bd93cbd34653dd63bbfcb99d7539c372795201f39d16fdfde4418de43a # msgpack-1.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+packaging==20.9 --hash=sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a # packaging-20.9-py2.py3-none-any.whl
+pastel==0.2.1 --hash=sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364 # pastel-0.2.1-py2.py3-none-any.whl
+pexpect==4.8.0 --hash=sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937 # pexpect-4.8.0-py2.py3-none-any.whl
+pip==22.0.4 --hash=sha256:c6aca0f2f081363f689f041d90dab2a07a9a07fb840284db2218117a52da800b # pip-22.0.4-py3-none-any.whl
+pkginfo==1.8.2 --hash=sha256:c24c487c6a7f72c66e816ab1796b96ac6c3d14d49338293d2141664330b55ffc # pkginfo-1.8.2-py2.py3-none-any.whl
+platformdirs==2.5.2 --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 # platformdirs-2.5.2-py3-none-any.whl
+poetry==1.1.13 --hash=sha256:52deb0792a2e801967ba9c4cdb39b56fe68b0b5cd3f195b004bef603db9d51a7 # poetry-1.1.13-py2.py3-none-any.whl
+poetry-core==1.0.8 --hash=sha256:54b0fab6f7b313886e547a52f8bf52b8cf43e65b2633c65117f8755289061924 # poetry_core-1.0.8-py2.py3-none-any.whl
+ptyprocess==0.7.0 --hash=sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35 # ptyprocess-0.7.0-py2.py3-none-any.whl
+pycparser==2.21 --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 # pycparser-2.21-py2.py3-none-any.whl
+pylev==1.4.0 --hash=sha256:7b2e2aa7b00e05bb3f7650eb506fc89f474f70493271a35c242d9a92188ad3dd # pylev-1.4.0-py2.py3-none-any.whl
+pyparsing==3.0.8 --hash=sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06 # pyparsing-3.0.8-py3-none-any.whl
+requests==2.27.1 --hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d # requests-2.27.1-py2.py3-none-any.whl
+requests-toolbelt==0.9.1 --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f # requests_toolbelt-0.9.1-py2.py3-none-any.whl
+setuptools==62.1.0 --hash=sha256:26ead7d1f93efc0f8c804d9fafafbe4a44b179580a7105754b245155f9af05a8 # setuptools-62.1.0-py3-none-any.whl
+shellingham==1.4.0 --hash=sha256:536b67a0697f2e4af32ab176c00a50ac2899c5a05e0d8e2dadac8e58888283f9 # shellingham-1.4.0-py2.py3-none-any.whl
+six==1.16.0 --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # six-1.16.0-py2.py3-none-any.whl
+tomlkit==0.10.1 --hash=sha256:3eba517439dcb2f84cf39f4f85fd2c3398309823a3c75ac3e73003638daf7915 # tomlkit-0.10.1-py3-none-any.whl
+urllib3==1.26.9 --hash=sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14 # urllib3-1.26.9-py2.py3-none-any.whl
+virtualenv==20.14.1 --hash=sha256:e617f16e25b42eb4f6e74096b9c9e37713cf10bf30168fb4a739f3fa8f898a3a # virtualenv-20.14.1-py2.py3-none-any.whl
+webencodings==0.5.1 --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 # webencodings-0.5.1-py2.py3-none-any.whl
+zipp==3.8.0 --hash=sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099 # zipp-3.8.0-py3-none-any.whl
diff --git a/pyproject.toml b/pyproject.toml
index 5cca711ddb..843d192d7a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -46,3 +46,9 @@ exclude = '''
   )/
 )
 '''
+
+[tool.poetry]
+name = "apache-tvm"
+authors = []
+version = "0.8.0"
+description = "Open source Deep Learning compliation toolkit"
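
For reference, write_dependencies in docker/freeze_deps.py fills the generated [tool.poetry.dependencies] section with one entry per constrained package, marking anything outside the core piece as optional so it can be selected through [tool.poetry.extras]. A toy run of that formatting logic (the package names and constraints here are illustrative only):

    import sys

    # Toy stand-ins for gen_requirements.py's CONSTRAINTS and the "core" piece.
    constraints = {"numpy": "==1.19.3", "onnx": "^1.10.0", "scipy": None}
    core_packages = {"numpy", "scipy"}

    sys.stdout.write("[tool.poetry.dependencies]\n" 'python = ">=3.7, <3.9"\n')
    for package, constraint in constraints.items():
        optional = package not in core_packages
        sys.stdout.write(
            f"{package} = {{ version = \"{constraint or '*'}\", optional = {str(optional).lower()} }}\n")
    # Prints:
    # [tool.poetry.dependencies]
    # python = ">=3.7, <3.9"
    # numpy = { version = "==1.19.3", optional = false }
    # onnx = { version = "^1.10.0", optional = true }
    # scipy = { version = "*", optional = false }
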