Posted to commits@tvm.apache.org by ar...@apache.org on 2022/07/12 21:47:46 UTC

[tvm] 01/47: Add freeze_deps tool and modify gen_requirements to work with it.

This is an automated email from the ASF dual-hosted git repository.

areusch pushed a commit to branch areusch/freeze-dependencies
in repository https://gitbox.apache.org/repos/asf/tvm.git

commit e115a2ee07a0c4f158704cff67aa6dbf1ba1a16d
Author: Andrew Reusch <ar...@gmail.com>
AuthorDate: Thu May 19 15:01:03 2022 -0700

    Add freeze_deps tool and modify gen_requirements to work with it.
---
 docker/python/ci-constraints.txt                   |  40 +++
 .../python/freeze-dependencies.sh                  |  42 +--
 docker/python/freeze_deps.py                       | 324 ++++++++++++++++++
 pyproject.toml                                     |  21 ++
 python/gen_requirements.py                         | 366 ++++++++++++++-------
 tests/lint/check_file_type.py                      |   1 +
 6 files changed, 645 insertions(+), 149 deletions(-)

diff --git a/docker/python/ci-constraints.txt b/docker/python/ci-constraints.txt
new file mode 100644
index 0000000000..258d4bd4bd
--- /dev/null
+++ b/docker/python/ci-constraints.txt
@@ -0,0 +1,40 @@
+# This file lists packages we intentionally hold back in CI. The only reason is that updating
+# past these bounds requires considerable work, and letting these packages float freely would
+# let small changes to the TVM dependency set be held up behind large migration tasks whenever
+# a new version is released. Pinning them here lets us decide when to tackle that migration work.
+#keras = "^2.6.0"
+#mxnet = "^1.6.0"
+
+black = "<21.8b0"  # Breaks tensorflow-gpu. Revisit when tensorflow is upgraded.
+blocklint = "==0.2.3"
+#commonmark = ">=0.7.3"
+cpplint = "==1.6.0"
+#docutils = ">=0.11,<0.17"
+#ethos-u-vela = "==3.2.0"
+flake8 = "==3.9.2"
+flowvision = "==0.1.0"
+#h5py = "==3.1.0"
+keras = "==2.6"
+jinja2 = "==3.0.3"
+mxnet = "==1.6.0"
+mypy = "==0.902"
+oneflow = "==0.7.0"
+onnx = "==1.10.2"
+onnxruntime = "==1.9.0"
+onnxoptimizer = "==0.2.6"
+numpy = "==1.19.3"
+paddlepaddle = "==2.1.3"
+pillow = "==9.1.0"
+pylint = "==2.4.4"
+scipy = "==1.7.3"
+sphinx = "==4.2.0"
+sphinx-gallery = "==0.4.0"
+tensorflow = "==2.6.2"
+tensorflow-aarch64 = "==2.6.2"
+tensorflow-estimator = "==2.6.0"
+tensorflow-gpu = "==2.6.2"
+tflite = "==2.4.0"
+torch = "==1.11.0"
+torchvision = "==0.12.0"
+#xgboost = "==1.4.2"
diff --git a/pyproject.toml b/docker/python/freeze-dependencies.sh
old mode 100644
new mode 100755
similarity index 59%
copy from pyproject.toml
copy to docker/python/freeze-dependencies.sh
index 5cca711ddb..cf0347b697
--- a/pyproject.toml
+++ b/docker/python/freeze-dependencies.sh
@@ -1,3 +1,4 @@
+#!/bin/bash -eux
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -15,34 +16,15 @@
 # specific language governing permissions and limitations
 # under the License.
 
-[tool.black]
-line-length = 100
-target-version = ['py36']
-include = '(\.pyi?$)'
-exclude = '''
 
-(
-  /(
-      \.github
-    | \.tvm
-    | \.tvm_test_data
-    | \.vscode
-    | \.venv
-    | 3rdparty
-    | build\/
-    | cmake\/
-    | conda\/
-    | docker\/
-    | docs\/
-    | golang\/
-    | include\/
-    | jvm\/
-    | licenses\/
-    | nnvm\/
-    | rust\/
-    | src\/
-    | vta\/
-    | web\/
-  )/
-)
-'''
+# Freeze TVM's Python dependencies into docker/python/build using the ci_py_deps docker image.
+set -eux
+
+cd "$(dirname "$0")/../.."
+
+# NOTE: working dir inside docker is repo root.
+docker/bash.sh -i "${BUILD_TAG}.ci_py_deps:latest" python3 docker/python/freeze_deps.py \
+               --ci-constraints=docker/python/ci-constraints.txt \
+               --gen-requirements-py=python/gen_requirements.py \
+               --template-pyproject-toml=pyproject.toml \
+               --output-base=docker/python/build
diff --git a/docker/python/freeze_deps.py b/docker/python/freeze_deps.py
new file mode 100644
index 0000000000..6a49d13af9
--- /dev/null
+++ b/docker/python/freeze_deps.py
@@ -0,0 +1,324 @@
+#!/usr/bin/env python3
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import argparse
+import importlib
+import pathlib
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import typing
+
+
+SECTION_RE = re.compile(r"\[([^]]+)\].*")
+
+
+def remove_sections(lines: typing.List[str], section_names: typing.List[str]) -> typing.List[str]:
+    """Remove .toml sections from a list of lines.
+
+    Parameters
+    ----------
+    lines : list[str]
+        A list containing the lines of the toml file.
+    section_names : list[str]
+        A list of names of sections which should be removed.
+
+    Returns
+    -------
+    (removed, insert_points):
+        A 2-tuple. `removed` is a new list of strings with those sections removed. `insert_points`
+        is a dict with an entry for each section header encountered; the key is the section name
+        and the value is the index into `removed` where that section begins (or, for removed
+        sections, where it would have been).
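+
+    Examples
+    --------
+    A minimal illustration (hypothetical input):
+
+    >>> remove_sections(["[a]", "x = 1", "[b]"], ["a"])
+    (['[b]'], {'a': 0, 'b': 0})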
+    """
+    removed = []
+    insert_points = {}
+    drop_line = False
+    for line in lines:
+        m = SECTION_RE.match(line)
+        if m:
+            drop_line = m.group(1) in section_names
+            insert_points[m.group(1)] = len(removed)
+
+        if not drop_line:
+            removed.append(line)
+
+    return removed, insert_points
+
+
+def write_dependencies(requirements_by_piece: dict, constraints: dict, output_f):
+    """Write the [tool.poetry.dependencies] section of pyproject.toml.
+
+    Parameters
+    ----------
+    requirements_by_piece : dict
+        The REQUIREMENTS_BY_PIECE dict from gen_requirements.py module.
+    constraints : dict
+        The CONSTRAINTS dict from gen_requirements.py module, updated with additional constraints from
+        ci-constraints.txt.
+    output_f : File
+        A file-like object where the section should be written.
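+
+    Notes
+    -----
+    An emitted line looks like (illustrative, given the xgboost constraint in gen_requirements.py):
+    ``xgboost = { version = ">=1.1.0,<1.6.0", optional = true }``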
+    """
+    output_f.write("[tool.poetry.dependencies]\n" 'python = ">=3.7, <3.9"\n')
+    core_packages = set(requirements_by_piece["core"][1])
+    dev_packages = set(requirements_by_piece["dev"][1])
+
+    for package, constraint in constraints.items():
+        if package in dev_packages:
+            continue
+
+        optional = package not in core_packages
+        marker = (
+            f', markers = "{constraint.environment_marker}"'
+            if constraint.environment_marker
+            else ""
+        )
+        output_f.write(
+            f"{package} = {{ version = \"{constraint.constraint or '*'}\", optional = {str(optional).lower()}{marker} }}\n"
+        )
+
+    output_f.write("\n")
+
+
+def write_dev_dependencies(requirements_by_piece: dict, constraints: dict, output_f):
+    """Write the [tool.poetry.dev-dependencies] section of pyproject.toml.
+
+    Parameters
+    ----------
+    requirements_by_piece : dict
+        The REQUIREMENTS_BY_PIECE dict from gen_requirements.py module.
+    constraints : dict
+        The CONSTRAINTS dict from gen_requirements.py module, updated with additional constraints from
+        ci-constraints.txt.
+    output_f : File
+        A file-like object where the section should be written.
+    """
+    output_f.write("[tool.poetry.dev-dependencies]\n")
+    dev_packages = set(requirements_by_piece["dev"][1])
+
+    for package, constraint in constraints.items():
+        if package not in dev_packages:
+            continue
+
+        output_f.write(f"{package} = \"{constraint.constraint or '*'}\"\n")
+
+    output_f.write("\n")
+
+
+def write_extras(requirements_by_piece: dict, constraints: dict, output_f):
+    """Write the [tool.poetry.extras] section of pyproject.toml.
+
+    Parameters
+    ----------
+    requirements_by_piece : dict
+        The REQUIREMENTS_BY_PIECE dict from gen_requirements.py module.
+    constraints : dict
+        The CONSTRAINTS dict from gen_requirements.py module, updated with additional constraints from
+        ci-constraints.txt.
+    output_f : File
+        A file-like object where the section should be written.
+    """
+    output_f.write("[tool.poetry.extras]\n")
+
+    for piece, (description, packages) in requirements_by_piece.items():
+        if piece in ("core", "dev"):
+            # These pieces do not need an extras declaration.
+            continue
+
+        output_f.write(f"# {description}\n")
+        package_list = ", ".join(f'"{p}"' for p in sorted(packages))
+        output_f.write(f"{piece} = [{package_list}]\n\n")
+
+    output_f.write("\n")
+
+
+# All of the generated sections, listed in the order in which they are emitted.
+SECTION_ORDER = ("tool.poetry.dependencies", "tool.poetry.dev-dependencies", "tool.poetry.extras")
+
+
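+# Matches ci-constraints.txt entries such as: black = "<21.8b0"  # trailing comments allowed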
+CI_CONSTRAINTS_RE = re.compile(r'(?P<package_name>[a-zA-Z0-9_-]+) = "(?P<version>[^"]+)".*')
+
+
+def generate_pyproject_toml(
+    ci_constraints_txt: pathlib.Path,
+    gen_requirements_py: pathlib.Path,
+    template_pyproject_toml: pathlib.Path,
+    output_pyproject_toml: pathlib.Path,
+):
+    """Generate poetry dependencies sections in pyproject.toml from gen_requirements.py.
+
+    Existing [tool.poetry.dev-dependencies], [tool.poetry.dependencies], and [tool.poetry.extras]
+    sections are overwritten.
+
+    Parameters
+    ----------
+    ci_constraints_txt : pathlib.Path
+        Path to ci-constraints.txt.
+    gen_requirements_py : pathlib.Path
+        Path to the python/gen_requirements.py file in TVM.
+    template_pyproject_toml : pathlib.Path
+        Path to a pyproject.toml whose [{dev-,}dependencies] sections should be replaced with those from
+        gen_requirements.py. In production, this is expected to be the checked-in pyproject.toml at
+        the root of the TVM repo.
+    output_pyproject_toml : pathlib.Path
+        Path to which the revised pyproject.toml is written; must not already exist.
+    """
+    with open(template_pyproject_toml) as template_f:
+        pyproject_toml, insert_points = remove_sections(template_f, SECTION_ORDER)
+
+    insert_points = {s: insert_points.get(s, len(pyproject_toml)) for s in SECTION_ORDER}
+
+    sys.path.insert(0, str(gen_requirements_py.resolve().parent))
+    gen_requirements = importlib.import_module(gen_requirements_py.stem)
+    sys.path.pop(0)
+
+    constraints_list = []
+    for pkg, constraint in gen_requirements.CONSTRAINTS:
+        gen_requirements.parse_constraint_entry(pkg, constraint, None, constraints_list)
+
+    constraints = {r.package: r for r in constraints_list}
+    with open(ci_constraints_txt) as ci_constraints_f:
+        for i, line in enumerate(ci_constraints_f, start=1):
+            if not line.strip():
+                continue
+
+            m = CI_CONSTRAINTS_RE.match(line)
+            if not m:
+                if line.startswith("#"):
+                    continue
+                print(f"{ci_constraints_txt}: {i}: Malformed line {line}")
+                sys.exit(2)
+
+            package_name = m.group("package_name")
+            if package_name not in constraints:
+                print(
+                    f"{ci_constraints_txt}: {i}: Package {package_name} not listed in gen_requirements.py"
+                )
+                sys.exit(2)
+
+            constraint = constraints[package_name]
+            if constraint.constraint != "==*":
+                print(
+                    f"{ci_constraints_txt}: {i}: Package {package_name} already functionally constrained in gen_requirements.py"
+                )
+                sys.exit(2)
+
+            constraints[package_name] = gen_requirements.Requirement(
+                constraint.package, m.group("version"), constraint.environment_marker
+            )
+
+    stop_points = list(
+        sorted(
+            [(v, k) for k, v in insert_points.items()],
+            key=lambda x: (x[0], SECTION_ORDER.index(x[1])),
+        )
+    )
+    next_stop = stop_points.pop(0)
+    with open(output_pyproject_toml, "w") as output_f:
+
+        def _write(next_stop, i):
+            while next_stop[0] == i:
+                writer_function_name = (
+                    f"write_{next_stop[1][len('tool.poetry.'):].replace('-', '_')}"
+                )
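+                # e.g. "tool.poetry.dev-dependencies" -> write_dev_dependencies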
+                globals()[writer_function_name](
+                    dict(gen_requirements.REQUIREMENTS_BY_PIECE), constraints, output_f
+                )
+                next_stop = stop_points.pop(0) if stop_points else (None, "")
+
+            return next_stop
+
+        for i, line in enumerate(pyproject_toml):
+            next_stop = _write(next_stop, i)
+            output_f.write(line)
+
+        next_stop = _write(next_stop, len(pyproject_toml))
+        assert next_stop[0] is None, f"Did not write all sections. Remaining: {next_stop}"
+
+
+def freeze_deps(output_pyproject_toml):
+    with open(output_pyproject_toml.parent / "poetry-lock.log", "w") as f:
+        # Disable parallel fetching which tends to result in "Connection aborted" errors.
+        # https://github.com/python-poetry/poetry/issues/3219
+        subprocess.check_call(
+            ["poetry", "config", "installer.parallel", "false"], cwd=output_pyproject_toml.parent
+        )
+        subprocess.check_call(
+            ["poetry", "lock", "-vv"],
+            stdout=f,
+            stderr=subprocess.STDOUT,
+            cwd=output_pyproject_toml.parent,
+        )
+
+
+REPO_ROOT = pathlib.Path(__file__).parent.parent.parent
+
+
+def parse_args(argv: typing.List[str]) -> argparse.Namespace:
+    parser = argparse.ArgumentParser(
+        argv[0],
+        usage="Create a pyproject.toml containing the information in python/gen_requirements.py",
+    )
+    parser.add_argument(
+        "--ci-constraints",
+        type=pathlib.Path,
+        default=REPO_ROOT / "docker" / "python" / "ci-constraints.txt",
+        help=(
+            "Path to a file describing packages held back in "
+            "CI to make routine package updates possible."
+        ),
+    )
+    parser.add_argument(
+        "--gen-requirements-py",
+        type=pathlib.Path,
+        default=REPO_ROOT / "python" / "gen_requirements.py",
+        help="Path to python/gen_requirements.py in the TVM repo",
+    )
+    parser.add_argument(
+        "--template-pyproject-toml",
+        type=pathlib.Path,
+        help="Path to the pyproject.toml to use as a basis for the updated pyproject.toml.",
+    )
+    parser.add_argument(
+        "--output-base",
+        type=pathlib.Path,
+        help="Path where the updated pyproject.toml and poetry.lock should be written.",
+    )
+
+    return parser.parse_args(argv[1:])
+
+
+def main(argv: typing.List[str]):
+    args = parse_args(argv)
+
+    if args.output_base.exists():
+        shutil.rmtree(args.output_base)
+    args.output_base.mkdir(parents=True)
+
+    pyproject_toml = pathlib.Path(args.output_base) / "pyproject.toml"
+    generate_pyproject_toml(
+        args.ci_constraints, args.gen_requirements_py, args.template_pyproject_toml, pyproject_toml
+    )
+    with open(pyproject_toml) as f:
+        print(f.read())
+    freeze_deps(pyproject_toml)
+
+
+if __name__ == "__main__":
+    main(sys.argv)
diff --git a/pyproject.toml b/pyproject.toml
index 5cca711ddb..65444820e0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -46,3 +46,24 @@ exclude = '''
   )/
 )
 '''
+
+[tool.poetry]
+name = "apache-tvm"
+authors = []
+version = "0.8.0"
+description = "Open source Deep Learning compliation toolkit"
+
+[[tool.poetry.source]]
+name = "oneflow"
+url = "https://release.oneflow.info"
+secondary = true
+
+#[[tool.poetry.source]]
+#name = "onnx"
+#url = "https://download.pytorch.org/whl/cpu"
+#secondary = true
+
+[[tool.poetry.source]]
+name = "tensorflow-aarch64"
+url = "https://snapshots.linaro.org/ldcg/python-cache"
+secondary = true
diff --git a/python/gen_requirements.py b/python/gen_requirements.py
index 7e2c3e2186..446347cece 100755
--- a/python/gen_requirements.py
+++ b/python/gen_requirements.py
@@ -45,14 +45,15 @@ The data representing each piece is contained in the two maps below.
 
 import argparse
 import collections
+import dataclasses
 import os
 import re
 import textwrap
 import sys
-import typing
+from typing import Dict, List, Pattern, Tuple, Union
 
 
-RequirementsByPieceType = typing.List[typing.Tuple[str, typing.Tuple[str, typing.List[str]]]]
+RequirementsByPieceType = List[Tuple[str, Tuple[str, List[str]]]]
 
 
 # Maps named TVM piece (see description above) to a list of names of Python packages. Please use
@@ -85,6 +86,13 @@ REQUIREMENTS_BY_PIECE: RequirementsByPieceType = [
             ],
         ),
     ),
+    (
+        "gpu",
+        (
+            "Requirements for working with GPUs",
+            [],  # NOTE: tensorflow-gpu installed via environment marker.
+        ),
+    ),
     # Relay frontends.
     (
         "importer-caffe",
@@ -112,7 +120,17 @@ REQUIREMENTS_BY_PIECE: RequirementsByPieceType = [
     ("importer-darknet", ("Requirements for the DarkNet importer", ["opencv-python"])),
     (
         "importer-keras",
-        ("Requirements for the Keras importer", ["tensorflow", "tensorflow-estimator"]),
+        ("Requirements for the Keras importer", ["keras", "tensorflow", "tensorflow-estimator"]),
+    ),
+    (
+        "importer-oneflow",
+        (
+            "Requirements for the OneFlow importer",
+            [
+                "flowvision",
+                "oneflow",
+            ],
+        ),
     ),
     (
         "importer-onnx",
@@ -128,6 +146,15 @@ REQUIREMENTS_BY_PIECE: RequirementsByPieceType = [
             ],
         ),
     ),
+    (
+        "importer-mxnet",
+        (
+            "Requirements for the MXNet importer",
+            [
+                "mxnet",
+            ],
+        ),
+    ),
     (
         "importer-paddle",
         ("Requirements for the PaddlePaddle importer", ["paddlepaddle"]),
@@ -170,17 +197,6 @@ REQUIREMENTS_BY_PIECE: RequirementsByPieceType = [
             ],
         ),
     ),
-    # Vitis AI requirements
-    (
-        "vitis-ai",
-        (
-            "Requirements for the Vitis AI codegen",
-            [
-                "h5py",
-                "progressbar",
-            ],
-        ),
-    ),
     # XGBoost, useful for autotuning on some targets.
     (
         "xgboost",
@@ -202,28 +218,45 @@ REQUIREMENTS_BY_PIECE: RequirementsByPieceType = [
                 "astroid",  # pylint requirement, listed so a hard constraint can be included.
                 "autodocsumm",
                 "black",
+                "blocklint",
                 "commonmark",
                 "cpplint",
                 "docutils",
+                "flake8",
                 "image",
+                "jinja2",
                 "matplotlib",
+                "mypy",
                 "pillow",
                 "pylint",
                 "sphinx",
-                "sphinx_autodoc_annotation",
-                "sphinx_gallery",
-                "sphinx_rtd_theme",
+                "sphinx-autodoc-annotation",
+                "sphinx-gallery",
+                "sphinx-rtd-theme",
                 "types-psutil",
             ],
         ),
     ),
 ]
 
-ConstraintsType = typing.List[typing.Tuple[str, typing.Union[None, str]]]
+ConstraintsType = List[Tuple[str, List[str]]]
 
-# Maps a named Python package (which should appear in REQUIREMENTS_BY_PIECE above) to a
-# semver or pip version constraint. Semver constraints are translated into requirements.txt-friendly
-# constraints.
+# Maps a named Python package (which should appear in REQUIREMENTS_BY_PIECE above) to one or more
+# constraint specifications matching the following form:
+#
+# [<replacement-package-name>]<constraint>[; <pep496 environment marker>]
+#
+# Where each field is defined as:
+# <replacement-package-name>: Valid only when <pep496 environment marker> is present. If given,
+#     uses this package name in place of the original when the environment marker condition is
+#     met.
+# <constraint>: A semantic version (semver.org) (expressed as "^a.b.c") or a pip version constraint.
+# <pep496 environment marker>: A PEP 496-compatible environment marker specifying the conditions
+#     under which this constraint and package should be used.
+#
+# A few limitations on replacement-package-name:
+# 1. It can't be mentioned in REQUIREMENTS_BY_PIECE.
+# 2. It can't be mentioned as <replacement-package-name> in a constraint for a different package
+#    name.
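+#
+# Illustrative entries (both appear in CONSTRAINTS below):
+#   ("xgboost", [">=1.1.0,<1.6.0"])  - a plain pip-style version bound.
+#   ("tensorflow", ["tensorflow-aarch64==*; platform_machine in 'aarch64'"])  - swaps in
+#     tensorflow-aarch64 on AArch64 hosts, with no version bound ("==*").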
 #
 # These constraints serve only to record technical reasons why a particular version can't be used.
 # They are the default install_requires used in setup.py. These can be further narrowed to restrict
@@ -234,74 +267,106 @@ ConstraintsType = typing.List[typing.Tuple[str, typing.Union[None, str]]]
 # 2. If TVM will functionally break against an old version of a dependency, specify a >= relation
 #    here. Include a comment linking to context or explaining why the constraint is in place.
 CONSTRAINTS = [
-    ("astroid", None),
-    ("attrs", None),
-    ("autodocsumm", None),
-    ("black", "==20.8b1"),
-    ("cloudpickle", None),
-    ("commonmark", ">=0.7.3"),  # From PR #213.
-    ("coremltools", None),
-    ("cpplint", None),
-    ("decorator", None),
+    ("astroid", []),
+    ("attrs", []),
+    ("autodocsumm", []),
+    ("black", []),
+    ("blocklint", []),
+    ("cloudpickle", []),
+    ("commonmark", [">=0.7.3"]),  # From PR #213.
+    ("coremltools", []),
+    ("cpplint", []),
+    ("decorator", []),
     (
         "docutils",
-        "<0.17",
+        [">=0.11,<0.17"],
     ),  # Work around https://github.com/readthedocs/sphinx_rtd_theme/issues/1115
-    ("ethos-u-vela", "==3.2.0"),
-    ("future", None),
-    ("h5py", "==2.10.0"),
-    ("image", None),
-    ("matplotlib", None),
-    ("numpy", None),
-    ("onnx", None),
-    ("onnxoptimizer", None),
-    ("onnxruntime", None),
-    ("opencv-python", None),
-    ("paddlepaddle", None),
-    ("pillow", None),
-    ("progressbar", None),
-    ("protobuf", None),
-    ("psutil", None),
-    ("pylint", None),
-    ("scikit-image", None),
-    ("scipy", None),
-    ("six", None),
-    ("sphinx", None),
-    ("sphinx_autodoc_annotation", None),
-    ("sphinx_gallery", None),
-    ("sphinx_rtd_theme", None),
-    ("synr", "==0.6.0"),
-    ("tensorflow", None),
-    ("tensorflow-estimator", None),
-    ("tflite", None),
-    ("torch", None),
-    ("torchvision", None),
-    ("tornado", None),
-    ("xgboost", ">=1.1.0,<1.6.0"),  # From PR #4953 & Issue #12009
+    ("ethos-u-vela", ["==3.2.0"]),
+    ("flake8", []),
+    ("flowvision", []),
+    ("future", []),
+    ("image", []),
+    ("jinja2", []),
+    ("keras", []),
+    ("matplotlib", []),
+    ("mxnet", []),
+    ("mypy", []),
+    ("numpy", []),
+    ("oneflow", []),
+    ("onnx", []),
+    ("onnxoptimizer", []),
+    ("onnxruntime", []),
+    ("opencv-python", []),
+    (
+        "paddlepaddle",
+        ["==*; 'importer-tensorflow' not in extra and 'importer-tflite' not in extra"],
+    ),
+    ("pillow", []),
+    ("protobuf", []),
+    ("psutil", []),
+    ("pylint", []),
+    ("scikit-image", []),
+    ("scipy", []),
+    ("six", []),
+    ("sphinx", []),
+    ("sphinx-autodoc-annotation", []),
+    ("sphinx-gallery", []),
+    ("sphinx-rtd-theme", []),
+    ("synr", ["==0.6.0"]),
+    (
+        "tensorflow",
+        [
+            "tensorflow==*; platform_machine not in 'aarch64' and 'gpu' not in extra and 'importer-paddle' not in extra",
+            "tensorflow-aarch64==*; platform_machine in 'aarch64' and 'importer-paddle' not in extra",
+            "tensorflow-gpu==*; platform_machine not in 'aarch64' and 'gpu' in extra and 'importer-paddle' not in extra",
+        ],
+    ),
+    ("tensorflow-estimator", []),
+    ("tflite", []),
+    ("torch", []),
+    ("torchvision", []),
+    ("tornado", []),
+    ("xgboost", [">=1.1.0,<1.6.0"]),  # From PR #4953 & Issue #12009
 ]
 
+
 ################################################################################
 # End of configuration options.
 ################################################################################
 
 
 # Required keys in REQUIREMENTS_BY_PIECE.
-REQUIRED_PIECES: typing.List[str] = ["core", "dev"]
+REQUIRED_PIECES: List[str] = ["core", "dev"]
 
 # Regex to validate piece names.
-PIECE_REGEX: typing.Pattern = re.compile(r"^[a-z0-9][a-z0-9-]*", re.IGNORECASE)
+PIECE_REGEX: Pattern = re.compile(r"^[a-z0-9][a-z0-9-]*", re.IGNORECASE)
 
 # Regex to match a constraint specification. Multiple constraints are not supported.
-CONSTRAINT_REGEX: typing.Pattern = re.compile(r"(?:\^|\<|(?:~=)|(?:<=)|(?:==)|(?:>=)|\>)[^<>=\^,]+")
+CONSTRAINT_REGEX: Pattern = re.compile(r"(?:\^|\<|(?:~=)|(?:<=)|(?:==)|(?:>=)|\>)[^<>=\^,;]+")
 
 # Regex for parsing semantic versions. See
 # https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
-SEMVER_REGEX: typing.Pattern = re.compile(
+SEMVER_REGEX: Pattern = re.compile(
     r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
 )
 
 
-def validate_requirements_by_piece() -> typing.List[str]:
+CONSTRAINT_SPEC_REGEX: Pattern = re.compile(
+    r"(?P<package>[a-z0-9_-]+)?"
+    + r"(?P<constraint>(?:"
+    + CONSTRAINT_REGEX.pattern
+    + r")"
+    + r"|(?:"
+    + SEMVER_REGEX.pattern
+    + r")"
+    + r"|(?:==\*))"
+    + r"(?:;[\s]*(?P<environment_marker>.+))?"
+)
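+# Illustrative match: "tensorflow-gpu==*; 'gpu' in extra" parses to package="tensorflow-gpu",
+# constraint="==*", and environment_marker="'gpu' in extra".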
+
+
+def validate_requirements_by_piece() -> List[str]:
     """Validate REQUIREMENTS_BY_PIECE, returning a list of problems.
 
     Returns
@@ -392,9 +457,7 @@ def validate_requirements_by_piece() -> typing.List[str]:
     return problems
 
 
-def parse_semver(
-    package: str, constraint: str, problems: typing.List[str]
-) -> typing.Tuple[typing.List[str], int, int]:
+def parse_semver(package: str, constraint: str, problems: List[str]) -> Tuple[List[str], int, int]:
     """Parse a semantic versioning constraint of the form "^X.[.Y[.Z[...]]]]"
 
     Parameters
@@ -447,7 +510,113 @@ def parse_semver(
     return min_ver_parts, 0, 0
 
 
-def validate_constraints() -> typing.List[str]:
+@dataclasses.dataclass(eq=True, frozen=True)
+class Requirement:
+    package: str
+    constraint: str
+    environment_marker: Union[str, None]
+
+    def to_requirement(self):
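+        # Illustrative: Requirement("torch", "==1.11.0", None) -> "torch==1.11.0";
+        # a marker, when present, is appended as "; <marker>".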
+        marker = f"; {self.environment_marker}" if self.environment_marker else ""
+        return f"{self.package}{self.constraint}{marker}"
+
+
+def semver_to_requirements(
+    dep: str, constraint: str, problems: List[str], joined_deps: Union[None, List[Requirement]]
+):
+    """Convert a SemVer-style constraint to a setuptools-compatible constraint.
+
+    Parameters
+    ----------
+    dep : str
+        Name of the PyPI package to depend on.
+    constraint : str
+        The SemVer constraint, of the form "^<semver constraint>"
+    problems : List[str]
+        A list of the validation problems encountered when parsing this semver.
+    joined_deps : Union[None, List[Requirement]]
+        Either:
+         1. A list of Requirement instances, each convertible to a setuptools-compatible line in
+            requirements.txt. The converted constraint is appended to this list.
+         2. None, in which case only validation is performed.
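+
+    Examples
+    --------
+    Illustrative, with a hypothetical package name:
+
+    >>> reqs = []
+    >>> semver_to_requirements("foo", "^1.2.0", [], reqs)
+    >>> reqs[0].constraint
+    '>=1.2.0,<2.0.0'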
+    """
+    min_ver_parts, fixed_index, fixed_part = parse_semver(dep, constraint, problems)
+    if joined_deps is not None:
+        text_problems = "\n" + "\n".join(f" * {p}" for p in problems)
+        assert (
+            not problems
+        ), f"should not happen: validated semver {constraint} parses with problems:{text_problems}"
+
+        max_ver_parts = (
+            min_ver_parts[:fixed_index]
+            + [str(fixed_part + 1)]
+            + ["0" for _ in min_ver_parts[fixed_index + 1 :]]
+        )
+        joined_deps.append(
+            Requirement(
+                package=dep,
+                constraint=f'>={".".join(min_ver_parts)},<{".".join(max_ver_parts)}',
+                environment_marker=None,
+            )
+        )
+
+
+def parse_constraint_entry(
+    package: str,
+    constraints: Union[None, str, List[str]],
+    problems: Union[None, List[str]],
+    requirements: Union[None, List[Requirement]],
+):
+    """Parse an entry in CONSTRAINTS into requirements.txt entries.
+
+    When requirements is None, assert-fails if any validation problems occur.
+
+    Parameters
+    ----------
+    package : str
+        The key of this entry in CONSTRAINTS.
+    constraints : Union[None, str, List[str]]
+        Either the value in CONSTRAINTS (if said value is a str or None) or one item from that
+        value (if said value is a list of strings) which should be converted into a requirement.
+    problems : Union[None, List[str]]
+        A list to which validation problems encountered while parsing the entry are appended, or
+        None.
+    requirements : Union[None, List[Requirement]]
+        Either:
+         1. A list of Requirement instances, each convertible to a setuptools-compatible line in
+            requirements.txt. The converted constraint is appended to this list.
+         2. None, in which case the constraint will only be validated.
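+
+    Examples
+    --------
+    Illustrative, using an entry from CONSTRAINTS:
+
+    >>> reqs = []
+    >>> parse_constraint_entry(
+    ...     "tensorflow",
+    ...     ["tensorflow-aarch64==*; platform_machine in 'aarch64'"], [], reqs)
+    >>> (reqs[0].package, reqs[0].constraint)
+    ('tensorflow-aarch64', '==*')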
+    """
+
+    def _parse_one(c):
+        print("PARSE_ONE", package, not c)
+        if not c:
+            if requirements is not None:
+                requirements.append(
+                    Requirement(package=package, constraint="==*", environment_marker=None)
+                )
+            return
+
+        m = CONSTRAINT_SPEC_REGEX.match(c)
+        if m is None:
+            problems.append(f'{package}: constraint "{c}" does not look like a valid constraint')
+            return
+
+        if c[0] == "^":
+            semver_to_requirements(package, c, problems, requirements)
+        elif requirements is not None:
+            groups = m.groupdict()
+            requirement_package = groups.get("package") or package
+            requirements.append(
+                Requirement(
+                    package=requirement_package,
+                    constraint=groups.get("constraint") or "==*",
+                    environment_marker=groups.get("environment_marker"),
+                )
+            )
+
+    if not constraints:
+        _parse_one(constraints)
+        return
+
+    for constraint in constraints:
+        _parse_one(constraint)
+
+
+def validate_constraints() -> List[str]:
     """Validate CONSTRAINTS, returning a list of problems found.
 
     Returns
@@ -477,13 +646,7 @@ def validate_constraints() -> typing.List[str]:
         if constraint is None:  # None is just a placeholder that allows for comments.
             continue
 
-        if not CONSTRAINT_REGEX.match(constraint):
-            problems.append(
-                f'{package}: constraint "{constraint}" does not look like a valid constraint'
-            )
-
-        if constraint.startswith("^"):
-            parse_semver(package, constraint, problems)
+        parse_constraint_entry(package, constraint, problems, None)
 
     all_constrained_packages = [p for (p, _) in CONSTRAINTS]
     sorted_constrained_packages = list(sorted(all_constrained_packages))
@@ -499,7 +662,7 @@ class ValidationError(Exception):
     """Raised when a validation error occurs."""
 
     @staticmethod
-    def format_problems(config: str, problems: typing.List[str]) -> str:
+    def format_problems(config: str, problems: List[str]) -> str:
         """Format a list of problems with a global config variable into human-readable output.
 
         Parameters
@@ -527,7 +690,7 @@ class ValidationError(Exception):
 
         return "\n".join(formatted)
 
-    def __init__(self, config: str, problems: typing.List[str]):
+    def __init__(self, config: str, problems: List[str]):
         """Describes an error that occurs validating one of the global config variables.
 
         Parameters
@@ -551,35 +714,7 @@ def validate_or_raise():
         raise ValidationError("CONSTRAINTS", problems)
 
 
-def semver_to_requirements(dep: str, constraint: str, joined_deps: typing.List[str]):
-    """Convert a SemVer-style constraint to a setuptools-compatible constraint.
-
-    Parameters
-    ----------
-    dep : str
-        Name of the PyPI package to depend on.
-    constraint : str
-        The SemVer constraint, of the form "^<semver constraint>"
-    joined_deps : list[str]
-        A list of strings, each a setuptools-compatible constraint which could be written to
-        a line in requirements.txt. The converted constraint is appended to this list.
-    """
-    problems: typing.List[str] = []
-    min_ver_parts, fixed_index, fixed_part = parse_semver(dep, constraint, problems)
-    text_problems = "\n" + "\n".join(f" * {p}" for p in problems)
-    assert (
-        not problems
-    ), f"should not happen: validated semver {constraint} parses with problems:{text_problems}"
-
-    max_ver_parts = (
-        min_ver_parts[:fixed_index]
-        + [str(fixed_part + 1)]
-        + ["0" for _ in min_ver_parts[fixed_index + 1 :]]
-    )
-    joined_deps.append(f'{dep}>={".".join(min_ver_parts)},<{".".join(max_ver_parts)}')
-
-
-def join_requirements() -> typing.Dict[str, typing.Tuple[str, typing.List[str]]]:
+def join_requirements() -> Dict[str, Tuple[str, List[str]]]:
     """Validate, then join REQUIRMENTS_BY_PIECE against CONSTRAINTS and return the result.
 
     Returns
@@ -597,14 +732,7 @@ def join_requirements() -> typing.Dict[str, typing.Tuple[str, typing.List[str]]]
         joined_deps = []
         for d in deps:
             constraint = constraints_map.get(d.lower())
-            if constraint is None:
-                joined_deps.append(d)
-                continue
-
-            if constraint[0] == "^":
-                semver_to_requirements(d, constraint, joined_deps)
-            else:
-                joined_deps.append(f"{d}{constraint}")
+            parse_constraint_entry(d, constraint, None, joined_deps)
 
         if piece != "dev":
             all_deps.update(joined_deps)
@@ -613,7 +741,7 @@ def join_requirements() -> typing.Dict[str, typing.Tuple[str, typing.List[str]]]
 
     to_return["all-prod"] = (
         "Combined dependencies for all TVM pieces, excluding dev",
-        list(sorted(all_deps)),
+        list(sorted(all_deps, key=lambda r: r.package)),
     )
 
     return to_return
@@ -648,7 +776,7 @@ def join_and_write_requirements(args: argparse.Namespace):
                 f"# {description}{os.linesep}"
             )
             for d in deps:
-                f.write(f"{d}{os.linesep}")
+                f.write(f"{d!s}{os.linesep}")
 
 
 def parse_args() -> argparse.Namespace:
diff --git a/tests/lint/check_file_type.py b/tests/lint/check_file_type.py
index 099ba3c3fa..55b4316bc4 100644
--- a/tests/lint/check_file_type.py
+++ b/tests/lint/check_file_type.py
@@ -153,6 +153,7 @@ ALLOW_SPECIFIC_FILE = {
     "apps/microtvm/reference-vm/base-box/Vagrantfile.packer-template",
     # Hexagon
     "src/runtime/hexagon/rpc/android_bash.sh.template",
+    "docker/python/build/poetry.lock",
 }