diff --git a/pyodide_lock/__init__.py b/pyodide_lock/__init__.py
index c6d10ba..3d51ddd 100644
--- a/pyodide_lock/__init__.py
+++ b/pyodide_lock/__init__.py
@@ -1,8 +1,9 @@
-from .spec import PackageSpec, PyodideLockSpec
+from .spec import InfoSpec, PackageSpec, PyodideLockSpec
 from .utils import parse_top_level_import_name
 
 __all__ = [
     "PyodideLockSpec",
     "PackageSpec",
+    "InfoSpec",
     "parse_top_level_import_name",
 ]
diff --git a/pyodide_lock/cli.py b/pyodide_lock/cli.py
new file mode 100644
index 0000000..af3c28e
--- /dev/null
+++ b/pyodide_lock/cli.py
@@ -0,0 +1,57 @@
+from pathlib import Path
+
+import typer
+
+from .spec import PyodideLockSpec
+from .utils import add_wheels_to_spec
+
+main = typer.Typer(help="Manipulate pyodide-lock.json lockfiles.")
+
+
+@main.command()
+def add_wheels(
+    wheels: list[Path],
+    ignore_missing_dependencies: bool = typer.Option(
+        help="If this is true, dependencies "
+        "which are not in the original lockfile or "
+        "the added wheels are still recorded as dependencies. "
+        "Warning: This will allow a broken lockfile to "
+        "be created.",
+        default=False,
+    ),
+    input: Path = typer.Option(
+        help="Source lockfile", default=Path("pyodide-lock.json")
+    ),
+    output: Path = typer.Option(
+        help="Updated lockfile", default=Path("pyodide-lock-new.json")
+    ),
+    base_path: Path = typer.Option(
+        help="Base path for wheels - wheel file "
+        "names will be created relative to this path.",
+        default=None,
+    ),
+    wheel_url: str = typer.Option(
+        help="Base url which will be prefixed to the wheel location. "
+        "Use this if you are hosting these wheels on a different "
+        "server from the core pyodide packages.",
+        default="",
+    ),
+):
+    """Add a set of package wheels to an existing pyodide-lock.json and
+    write the result to a new lockfile (pyodide-lock-new.json by default).
+
+    Each wheel is added to the output lockfile, and its dependencies
+    are resolved against the combined lockfile. By default this will
+    fail if a dependency isn't available in either the existing
+    lockfile or the set of new wheels.
+
+    """
+    sp = PyodideLockSpec.from_json(input)
+    sp = add_wheels_to_spec(
+        sp,
+        wheels,
+        base_path=base_path,
+        base_url=wheel_url,
+        ignore_missing_dependencies=ignore_missing_dependencies,
+    )
+    sp.to_json(output)
diff --git a/pyodide_lock/spec.py b/pyodide_lock/spec.py
index c2f0c58..b91a4ec 100644
--- a/pyodide_lock/spec.py
+++ b/pyodide_lock/spec.py
@@ -2,13 +2,7 @@
 from pathlib import Path
 from typing import Literal
 
-from pydantic import BaseModel, Extra
-
-from .utils import (
-    _generate_package_hash,
-    _wheel_depends,
-    parse_top_level_import_name,
-)
+from pydantic import BaseModel, Extra, Field
 
 
 class InfoSpec(BaseModel):
@@ -24,7 +18,9 @@ class Config:
 class PackageSpec(BaseModel):
     name: str
     version: str
-    file_name: str
+    file_name: str = Field(
+        description="Path (or URL) to wheel.", format="uri-reference"
+    )
     install_dir: str
     sha256: str = ""
     package_type: Literal[
@@ -39,41 +35,6 @@ class Config:
     class Config:
         extra = Extra.forbid
 
-    @classmethod
-    def from_wheel(
-        cls,
-        path: Path,
-        marker_env: None | dict[str, str] = None,
-    ) -> "PackageSpec":
-        """Build a package spec from an on-disk wheel.
-
-        This currently assumes a "simple" noarch wheel: more complex packages
-        may require further postprocessing.
- """ - import pkginfo - from packaging.utils import canonicalize_name - - metadata = pkginfo.get_metadata(str(path)) - - if not metadata: - raise RuntimeError(f"Could not parse wheel metadata from {path.name}") - - return PackageSpec( - name=canonicalize_name(metadata.name), - version=metadata.version, - file_name=path.name, - sha256=_generate_package_hash(path), - package_type="package", - install_dir="site", - imports=parse_top_level_import_name(path), - depends=_wheel_depends(metadata, marker_env), - ) - - def update_sha256(self, path: Path) -> "PackageSpec": - """Update the sha256 hash for a package.""" - self.sha256 = _generate_package_hash(path) - return self - class PyodideLockSpec(BaseModel): """A specification for the pyodide-lock.json file.""" diff --git a/pyodide_lock/utils.py b/pyodide_lock/utils.py index dfbefd3..a201850 100644 --- a/pyodide_lock/utils.py +++ b/pyodide_lock/utils.py @@ -4,10 +4,14 @@ import sys import zipfile from collections import deque +from functools import cache from pathlib import Path -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING # + +from .spec import InfoSpec, PackageSpec, PyodideLockSpec if TYPE_CHECKING: + from packaging.requirements import Requirement from pkginfo import Distribution logger = logging.getLogger(__name__) @@ -41,10 +45,12 @@ def parse_top_level_import_name(whlfile: Path) -> list[str] | None: whlzip = zipfile.Path(whlfile) - # If there is no top_level.txt file, we will find top level imports by + # We will find top level imports by # 1) a python file on a top-level directory # 2) a sub directory with __init__.py # following: https://github.com/pypa/setuptools/blob/d680efc8b4cd9aa388d07d3e298b870d26e9e04b/setuptools/discovery.py#L122 + # - n.b. this is more reliable than using top-level.txt which is + # sometimes broken top_level_imports = [] for subdir in whlzip.iterdir(): if subdir.is_file() and subdir.name.endswith(".py"): @@ -52,7 +58,6 @@ def parse_top_level_import_name(whlfile: Path) -> list[str] | None: elif subdir.is_dir() and _valid_package_name(subdir.name): if _has_python_file(subdir): top_level_imports.append(subdir.name) - if not top_level_imports: logger.warning( f"WARNING: failed to parse top level import name from {whlfile}." @@ -97,36 +102,288 @@ def _generate_package_hash(full_path: Path) -> str: return sha256_hash.hexdigest() -def _wheel_depends( - metadata: "Distribution", marker_env: None | dict[str, str] = None -) -> list[str]: - """Get the normalized runtime distribution dependencies from wheel metadata. +def _get_marker_environment( + platform: str, version: str, arch: str, python: str +) -> dict[str, str]: + """ + Get the marker environment for this pyodide-lock file. If running + inside pyodide it returns the current marker environment. + """ + if "pyodide" in sys.modules: + from packaging.markers import default_environment + + return default_environment() + else: + marker_env = _PYODIDE_MARKER_ENV.copy() + from packaging.version import parse as version_parse - ``marker_env`` is an optional dictionary of platform information, used to find - platform-specific requirements as per PEP 508. 
+ target_python = version_parse(python) + match = re.match("([^_]+)_(.*)", platform) + if match is not None: + marker_env["sys_platform"] = match.group(1) + marker_env["platform_release"] = match.group(2) + marker_env["implementation_version"] = python + marker_env["python_full_version"] = python + marker_env["python_version"] = f"{target_python.major}.{target_python.minor}" + marker_env["platform_machine"] = arch + return marker_env - https://peps.python.org/pep-0508 - An accurate enumeration can be generated inside the target pyodide environment - such as the example below: +@cache +def _wheel_metadata(path: Path) -> "Distribution": + """Cached wheel metadata to save opening the file multiple times""" + from pkginfo import get_metadata - .. code: + metadata = get_metadata(str(path)) + return metadata - from packaging.markers import default_environment - print(default_enviroment()) - """ +def _wheel_depends(metadata: "Distribution") -> list["Requirement"]: + """Get distribution dependencies from wheel metadata.""" from packaging.requirements import Requirement + + depends: list[Requirement] = [] + + for dep_str in metadata.requires_dist: + req = Requirement(dep_str) + depends.append(req) + + return depends + + +def add_wheels_to_spec( + lock_spec: PyodideLockSpec, + wheel_files: list[Path], + base_path: Path | None = None, + base_url: str = "", + ignore_missing_dependencies: bool = False, +) -> PyodideLockSpec: + """Add a list of wheel files to this pyodide-lock.json and return a + new PyodideLockSpec + + Parameters: + wheel_files : list[Path] + A list of wheel files to import. + base_path : Path | None, optional + Filenames are stored relative to this base path. By default the + filename is stored relative to the path of the first wheel file + in the list. + base_url : str, optional + The base URL stored in the pyodide-lock.json. By default this + is empty which means that wheels must be stored in the same folder + as the core pyodide packages you are using. If you want to store + your custom wheels somewhere else, set this base_url to point to it. + ignore_missing_dependencies: bool, optional + If this is set to True, any dependencies not found in the lock file + or the set of wheels being added will be added to the spec. This is + not 100% reliable, because it ignores any extras and does not do any + sub-dependency or version resolution. + """ + new_spec = lock_spec.copy(deep=True) + if not wheel_files: + return new_spec + wheel_files = [f.resolve() for f in wheel_files] + if base_path is None: + base_path = wheel_files[0].parent + else: + base_path = base_path.resolve() + + new_packages = {} + for f in wheel_files: + spec = package_spec_from_wheel(f, info=lock_spec.info) + + new_packages[spec.name] = spec + + _fix_new_package_deps(lock_spec, new_packages, ignore_missing_dependencies) + _set_package_paths(new_packages, base_path, base_url) + new_spec.packages |= new_packages + return new_spec + + +def _fix_new_package_deps( + lock_spec: PyodideLockSpec, + new_packages: dict[str, PackageSpec], + ignore_missing_dependencies: bool, +): + # now fix up the dependencies for each of our new packages + # n.b. this assumes existing packages have correct dependencies, + # which is probably a good assumption. 
from packaging.utils import canonicalize_name - depends: list[str] = [] + requirements_with_extras = [] + marker_environment = _get_marker_environment(**lock_spec.info.dict()) + for package in new_packages.values(): + # add any requirements to the list of packages + our_depends = [] + wheel_file = package.file_name + metadata = _wheel_metadata(wheel_file) + requirements = _wheel_depends(metadata) + for r in requirements: + req_marker = r.marker + req_name = canonicalize_name(r.name) + if req_marker is not None: + if not req_marker.evaluate(marker_environment): + # not used in pyodide / emscripten + # or optional requirement + continue + if r.extras: + # this requirement has some extras, we need to check + # that the required package depends on these extras also. + requirements_with_extras.append(r) + if req_name in new_packages or req_name in lock_spec.packages: + our_depends.append(req_name) + elif ignore_missing_dependencies: + our_depends.append(req_name) + else: + raise RuntimeError( + f"Requirement {req_name} from {r} is not in this distribution." + ) + package.depends = our_depends + while len(requirements_with_extras) != 0: + extra_req = requirements_with_extras.pop() + requirements_with_extras.extend( + _fix_extra_dep( + lock_spec, extra_req, new_packages, ignore_missing_dependencies + ) + ) - env = dict({} if "pyodide" in sys.modules else _PYODIDE_MARKER_ENV) - env.update(marker_env or {}) - for dep_str in metadata.requires_dist: - req = Requirement(re.sub(r";$", "", dep_str)) - if req.marker is None or req.marker.evaluate(env): - depends += [canonicalize_name(req.name)] +# When requirements have extras, we need to make sure that the +# required package includes the dependencies for that extra. +# This is because extras aren't supported in pyodide-lock +def _fix_extra_dep( + lock_spec: PyodideLockSpec, + extra_req: "Requirement", + new_packages: dict[str, PackageSpec], + ignore_missing_dependencies: bool, +) -> list["Requirement"]: + from packaging.utils import canonicalize_name + + requirements_with_extras = [] + + marker_environment = _get_marker_environment(**lock_spec.info.dict()) + extra_package_name = canonicalize_name(extra_req.name) + if extra_package_name not in new_packages: + return [] + package = new_packages[extra_package_name] + our_depends = package.depends + wheel_file = package.file_name + metadata = _wheel_metadata(wheel_file) + requirements = _wheel_depends(metadata) + for extra in extra_req.extras: + this_marker_env = marker_environment.copy() + this_marker_env["extra"] = extra + + for r in requirements: + req_marker = r.marker + req_name = canonicalize_name(r.name) + if req_name not in our_depends: + if req_marker is None: + # no marker - this will have been processed above + continue + if req_marker.evaluate(this_marker_env): + if req_name in new_packages or req_name in lock_spec.packages: + our_depends.append(req_name) + if r.extras: + requirements_with_extras.append(r) + elif ignore_missing_dependencies: + our_depends.append(req_name) + else: + raise RuntimeError( + f"Requirement {req_name} is not in this distribution." 
+                        )
+    package.depends = our_depends
+    return requirements_with_extras
+
+
+def _set_package_paths(
+    new_packages: dict[str, PackageSpec], base_path: Path, base_url: str
+):
+    for p in new_packages.values():
+        current_path = Path(p.file_name)
+        relative_path = current_path.relative_to(base_path)
+        p.file_name = base_url + str(relative_path)
+
+
+def _check_wheel_compatible(path: Path, info: InfoSpec) -> None:
+    from packaging.utils import (
+        InvalidWheelFilename,
+        parse_wheel_filename,
+    )
+    from packaging.version import InvalidVersion
+    from packaging.version import parse as version_parse
+
+    target_python = version_parse(info.python)
+    target_platform = info.platform + "_" + info.arch
+    try:
+        (name, version, build_number, tags) = parse_wheel_filename(str(path.name))
+    except (InvalidWheelFilename, InvalidVersion) as e:
+        raise RuntimeError(f"Wheel filename {path.name} is not valid") from e
+    python_binary_abi = f"cp{target_python.major}{target_python.minor}"
+    tags = list(tags)
+
+    tag_match = False
+    for t in tags:
+        # binary wheels must match the target interpreter, abi and platform tags
+        if (
+            t.abi == python_binary_abi
+            and t.interpreter == python_binary_abi
+            and t.platform == target_platform
+        ):
+            tag_match = True
+        elif t.abi == "none" and t.platform == "any":
+            match = re.match(rf"py{target_python.major}(\d*)", t.interpreter)
+            if match:
+                subver = match.group(1)
+                if len(subver) == 0 or int(subver) <= target_python.minor:
+                    tag_match = True
+    if not tag_match:
+        raise RuntimeError(
+            f"Package tags for {path} don't match Python version in lockfile: "
+            f"Lockfile python {target_python.major}.{target_python.minor} "
+            f"on platform {target_platform} ({python_binary_abi})"
+        )
+
+
+def package_spec_from_wheel(path: Path, info: InfoSpec) -> PackageSpec:
+    """Build a package spec from an on-disk wheel.
+
+    Warning - to reliably handle dependencies, we need:
+    1) To have access to all the wheels being added at once (to handle extras)
+    2) To know whether dependencies are available in the combined lockfile.
+ 3) To fix up wheel urls and paths consistently + + This is called by add_wheels_to_spec + """ + from packaging.utils import ( + canonicalize_name, + ) + + path = path.absolute() + # throw an error if this is an incompatible wheel + + _check_wheel_compatible(path, info) + metadata = _wheel_metadata(path) + + if not metadata: + raise RuntimeError(f"Could not parse wheel metadata from {path.name}") + + # returns a draft PackageSpec with: + # 1) absolute path to wheel, + # 2) empty dependency list + return PackageSpec( + name=canonicalize_name(metadata.name), + version=metadata.version, + file_name=str(path), + sha256=_generate_package_hash(path), + package_type="package", + install_dir="site", + imports=parse_top_level_import_name(path), + depends=[], + ) + - return sorted(set(depends)) +def update_package_sha256(spec: PackageSpec, path: Path) -> "PackageSpec": + """Update the sha256 hash for a package.""" + spec.sha256 = _generate_package_hash(path) + return spec diff --git a/pyproject.toml b/pyproject.toml index a9c5f26..888e63d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,9 @@ classifiers = [ dynamic = ["version"] [project.optional-dependencies] +cli = [ + "typer", +] wheel = [ "pkginfo", "packaging", @@ -30,9 +33,11 @@ dev = [ "pytest", "pytest-cov", "build", + "typer", # from wheel "pkginfo", "packaging", + "wheel" ] [project.urls] @@ -76,3 +81,6 @@ select = [ [tool.pytest.ini_options] addopts = ''' --doctest-modules''' + +[project.entry-points."pyodide.cli"] +lockfile = "pyodide_lock.cli:main" diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..602fe0a --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,161 @@ +import json +from copy import deepcopy +from dataclasses import asdict, dataclass +from pathlib import Path +from tempfile import TemporaryDirectory + +import build +import pytest +from packaging.utils import canonicalize_name + +from pyodide_lock import PyodideLockSpec +from pyodide_lock.utils import _get_marker_environment + +LOCK_EXAMPLE = { + "info": { + "arch": "wasm32", + "platform": "emscripten_3_1_39", + "version": "0.24.0.dev0", + "python": "3.11.3", + }, + "packages": { + "numpy": { + "name": "numpy", + "version": "1.24.3", + "file_name": "numpy-1.24.3-cp311-cp311-emscripten_3_1_39_wasm32.whl", + "install_dir": "site", + "sha256": ( + "513af43ffb1f7d507c8d879c9f7e5" "d6c789ad21b6a67e5bca1d7cfb86bf8640f" + ), + "imports": ["numpy"], + "depends": [], + } + }, +} + +# marker environment for testing +_ENV = _get_marker_environment(**LOCK_EXAMPLE["info"]) # type:ignore[arg-type] +# marker environment for testing, filtered only to numerical values +_ENV_NUM = {k: v for k, v in _ENV.items() if v[0] in "0123456789"} + +MARKER_EXAMPLES_NOT_NEEDED = ( + [ + 'requests [security,tests] >= 2.8.1, == 2.8.* ; python_version < "2.7"', + 'argparse;python_version<"2.7"', + ] + + [f'Not.expected ; {k} != "{v}"' for k, v in _ENV.items()] + + [f'Not.expected ; {k} > "{v}"' for k, v in _ENV_NUM.items()] +) + + +MARKER_EXAMPLES_NEEDED = ( + [ + 'a;python_version>="3.5"', + 'b;sys_platform=="emscripten"', + ] + + [f'c_{k}; {k} == "{v}"' for k, v in _ENV.items()] + + [f'd_{k} ; {k} <= "{v}"' for k, v in _ENV_NUM.items()] +) + + +@pytest.fixture +def marker_examples_needed(): + return MARKER_EXAMPLES_NEEDED + + +@pytest.fixture +def marker_examples_not_needed(): + return MARKER_EXAMPLES_NOT_NEEDED + + +@pytest.fixture +def example_lock_data(): + return deepcopy(LOCK_EXAMPLE) + + +@pytest.fixture +def example_lock_spec(): + return 
PyodideLockSpec(**deepcopy(LOCK_EXAMPLE)) + + +# build a wheel +def make_test_wheel( + dir: Path, + package_name: str, + deps: list[str] | None = None, + optional_deps: dict[str, list[str]] | None = None, + modules: list[str] | None = None, +): + package_dir = dir / package_name + package_dir.mkdir() + if not modules: + modules = [canonicalize_name(package_name).replace("-", "_")] + for m in modules: + (package_dir / f"{m}.py").write_text("") + toml = package_dir / "pyproject.toml" + if deps is None: + deps = [] + + all_deps = json.dumps(deps) + if optional_deps: + all_optional_deps = "[project.optional-dependencies]\n" + "\n".join( + [x + "=" + json.dumps(optional_deps[x]) for x in optional_deps.keys()] + ) + else: + all_optional_deps = "" + toml_text = f""" +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "{package_name}" +description = "{package_name} example package" +version = "1.0.0" +authors = [ + {{ name = "Bob Jones", email = "bobjones@nowhere.nowhere" }} +] +dependencies = { + all_deps +} + +{ all_optional_deps } + +""" + toml.write_text(toml_text) + builder = build.ProjectBuilder(package_dir) + return Path(builder.build("wheel", dir / "dist")) + + +@pytest.fixture(scope="module") +def test_wheel_list(): + @dataclass + class TestWheel: + package_name: str + modules: list[str] | None = None + deps: list[str] | None = None + optional_deps: dict[str, list[str]] | None = None + + # a set of test wheels - note that names are non-canonicalized + # deliberately to test this + test_wheels: list[TestWheel] = [ + TestWheel(package_name="py-one", modules=["one"]), + TestWheel(package_name="NEeds-one", deps=["py_one"]), + TestWheel(package_name="nEEds-one-opt", optional_deps={"with_one": ["py_One"]}), + TestWheel( + package_name="test-extra_dependencies", deps=["needs-one-opt[with_one]"] + ), + TestWheel(package_name="failure", deps=["two"]), + TestWheel( + package_name="markers_not_needed_test", deps=MARKER_EXAMPLES_NOT_NEEDED + ), + TestWheel(package_name="markers_needed_test", deps=MARKER_EXAMPLES_NEEDED), + ] + + with TemporaryDirectory() as tmpdir: + path_temp = Path(tmpdir) + path_temp.mkdir(exist_ok=True) + all_wheels = [] + for wheel_data in test_wheels: + all_wheels.append(make_test_wheel(path_temp, **asdict(wheel_data))) + yield all_wheels diff --git a/tests/test_spec.py b/tests/test_spec.py index 7d29ae2..0a78576 100644 --- a/tests/test_spec.py +++ b/tests/test_spec.py @@ -7,31 +7,10 @@ from pyodide_lock import PyodideLockSpec from pyodide_lock.spec import InfoSpec, PackageSpec +from pyodide_lock.utils import update_package_sha256 DATA_DIR = Path(__file__).parent / "data" -LOCK_EXAMPLE = { - "info": { - "arch": "wasm32", - "platform": "emscripten_3_1_39", - "version": "0.24.0.dev0", - "python": "3.11.3", - }, - "packages": { - "numpy": { - "name": "numpy", - "version": "1.24.3", - "file_name": "numpy-1.24.3-cp311-cp311-emscripten_3_1_39_wasm32.whl", - "install_dir": "site", - "sha256": ( - "513af43ffb1f7d507c8d879c9f7e5" "d6c789ad21b6a67e5bca1d7cfb86bf8640f" - ), - "imports": ["numpy"], - "depends": [], - } - }, -} - @pytest.mark.parametrize("pyodide_version", ["0.22.1", "0.23.3"]) def test_lock_spec_parsing(pyodide_version, tmp_path): @@ -54,14 +33,12 @@ def test_lock_spec_parsing(pyodide_version, tmp_path): assert spec.packages[key] == spec2.packages[key] -def test_check_wheel_filenames(): - lock_data = deepcopy(LOCK_EXAMPLE) - - spec = PyodideLockSpec(**lock_data) +def test_check_wheel_filenames(example_lock_data): + spec 
= PyodideLockSpec(**example_lock_data) spec.check_wheel_filenames() - lock_data["packages"]["numpy"]["name"] = "numpy2" # type: ignore[index] - spec = PyodideLockSpec(**lock_data) + example_lock_data["packages"]["numpy"]["name"] = "numpy2" # type: ignore[index] + spec = PyodideLockSpec(**example_lock_data) msg = ( ".*check_wheel_filenames failed.*\n.*numpy:\n.*" "Package name in wheel filename 'numpy' does not match 'numpy2'" @@ -69,8 +46,8 @@ def test_check_wheel_filenames(): with pytest.raises(ValueError, match=msg): spec.check_wheel_filenames() - lock_data["packages"]["numpy"]["version"] = "0.2.3" # type: ignore[index] - spec = PyodideLockSpec(**lock_data) + example_lock_data["packages"]["numpy"]["version"] = "0.2.3" # type: ignore[index] + spec = PyodideLockSpec(**example_lock_data) msg = ( ".*check_wheel_filenames failed.*\n.*numpy:\n.*" "Package name in wheel filename 'numpy' does not match 'numpy2'\n.*" @@ -81,11 +58,10 @@ def test_check_wheel_filenames(): spec.check_wheel_filenames() -def test_to_json_indent(tmp_path): - lock_data = deepcopy(LOCK_EXAMPLE) +def test_to_json_indent(tmp_path, example_lock_data): target_path = tmp_path / "pyodide-lock.json" - spec = PyodideLockSpec(**lock_data) + spec = PyodideLockSpec(**example_lock_data) spec.to_json(target_path) assert "\n" not in target_path.read_text() @@ -97,30 +73,30 @@ def test_to_json_indent(tmp_path): assert "\n" in target_path.read_text() -def test_update_sha256(monkeypatch): - monkeypatch.setattr("pyodide_lock.spec._generate_package_hash", lambda x: "abcd") - lock_data = deepcopy(LOCK_EXAMPLE) +def test_update_sha256(monkeypatch, example_lock_data): + monkeypatch.setattr("pyodide_lock.utils._generate_package_hash", lambda x: "abcd") - lock_data["packages"]["numpy"]["sha256"] = "0" # type: ignore[index] - spec = PyodideLockSpec(**lock_data) + example_lock_data["packages"]["numpy"]["sha256"] = "0" # type: ignore[index] + spec = PyodideLockSpec(**example_lock_data) assert spec.packages["numpy"].sha256 == "0" - spec.packages["numpy"].update_sha256(Path("/some/path")) + update_package_sha256(spec.packages["numpy"], Path("/some/path")) assert spec.packages["numpy"].sha256 == "abcd" -def test_extra_config_forbidden(): +def test_extra_config_forbidden(example_lock_data): from pydantic import ValidationError - lock_data = deepcopy(LOCK_EXAMPLE) - info_data = deepcopy(lock_data["info"]) - package_data = deepcopy(lock_data["packages"]["numpy"]) # type: ignore[index] + info_data = deepcopy(example_lock_data["info"]) + package_data = deepcopy( + example_lock_data["packages"]["numpy"] + ) # type: ignore[index] - lock_data["extra"] = "extra" + example_lock_data["extra"] = "extra" info_data["extra"] = "extra" # type: ignore[index] package_data["extra"] = "extra" with pytest.raises(ValidationError, match="extra fields not permitted"): - PyodideLockSpec(**lock_data) + PyodideLockSpec(**example_lock_data) with pytest.raises(ValidationError, match="extra fields not permitted"): InfoSpec(**info_data) # type: ignore[arg-type] diff --git a/tests/test_wheel.py b/tests/test_wheel.py index 8dfd8ec..265da85 100644 --- a/tests/test_wheel.py +++ b/tests/test_wheel.py @@ -1,52 +1,111 @@ import zipfile from pathlib import Path -from typing import TYPE_CHECKING -import pkginfo import pytest +from packaging.version import parse as version_parse from pyodide_lock import PackageSpec from pyodide_lock.utils import ( - _PYODIDE_MARKER_ENV as _ENV, -) -from pyodide_lock.utils import ( + _check_wheel_compatible, _generate_package_hash, - _wheel_depends, + 
add_wheels_to_spec, ) -if TYPE_CHECKING: - TDepExamples = dict[tuple[str], list[str]] - +# we test if our own wheel imports nicely +# so check if it is built in /dist, or else skip that test HERE = Path(__file__).parent DIST = HERE.parent / "dist" WHEEL = next(DIST.glob("*.whl")) if DIST.exists() else None -_ENV_NUM = {k: v for k, v in _ENV.items() if v[0] in "0123456789"} - -# from https://peps.python.org/pep-0508/#examples -PEP_0508_EXAMPLES: "TDepExamples" = { - ('requests [security,tests] >= 2.8.1, == 2.8.* ; python_version < "2.7"',): [], - ('argparse;python_version<"2.7"',): [], -} -MARKER_EXAMPLES: "TDepExamples" = { - (f'Expected ; {k} == "{v}"',): ["expected"] for k, v in _ENV.items() -} -NOT_MARKER_EXAMPLES: "TDepExamples" = { - (f'Not.expected ; {k} != "{v}"',): [] for k, v in _ENV.items() -} -NUM_MARKER_EXAMPLES: "TDepExamples" = { - (f'Expected ; {k} >= "{v}"',): ["expected"] for k, v in _ENV_NUM.items() -} -NOT_NUM_MARKER_EXAMPLES: "TDepExamples" = { - (f'Not-expected ; {k} < "{v}"',): [] for k, v in _ENV_NUM.items() -} + +def test_add_one(test_wheel_list, example_lock_spec): + new_lock_spec = add_wheels_to_spec(example_lock_spec, test_wheel_list[0:1]) + # py_one only should get added to the new spec + assert new_lock_spec.packages["py-one"].imports == ["one"] + assert "py-one" not in example_lock_spec.packages + + +def test_add_simple_deps(test_wheel_list, example_lock_spec): + example_lock_spec = add_wheels_to_spec(example_lock_spec, test_wheel_list[0:3]) + # py_one, needs_one and needs_one_opt should get added + assert "py-one" in example_lock_spec.packages + assert "needs-one" in example_lock_spec.packages + assert "needs-one-opt" in example_lock_spec.packages + # needs one opt should not depend on py_one + assert example_lock_spec.packages["needs-one-opt"].depends == [] + # needs one should depend on py_one + assert example_lock_spec.packages["needs-one"].depends == ["py-one"] + + +def test_add_deps_with_extras(test_wheel_list, example_lock_spec): + example_lock_spec = add_wheels_to_spec(example_lock_spec, test_wheel_list[0:4]) + # py_one, needs_one, needs_one_opt and test_extra_dependencies should get added + # because of the extra dependency in test_extra_dependencies, + # needs_one_opt should now depend on one + assert "test-extra-dependencies" in example_lock_spec.packages + assert example_lock_spec.packages["needs-one-opt"].depends == ["py-one"] + + +def test_missing_dep(test_wheel_list, example_lock_spec): + # this has a package with a missing dependency so should fail + with pytest.raises(RuntimeError): + example_lock_spec = add_wheels_to_spec(example_lock_spec, test_wheel_list[0:5]) + + +def test_url_rewriting(test_wheel_list, example_lock_spec): + example_lock_spec = add_wheels_to_spec( + example_lock_spec, test_wheel_list[0:3], base_url="http://www.nowhere.org/" + ) + # py_one, needs_one and needs_one_opt should get added + assert "py-one" in example_lock_spec.packages + assert "needs-one" in example_lock_spec.packages + assert "needs-one-opt" in example_lock_spec.packages + assert example_lock_spec.packages["py-one"].file_name.startswith( + "http://www.nowhere.org/py_one" + ) + + +def test_base_relative_path(test_wheel_list, example_lock_spec): + # this should make all the file names relative to the + # parent path of the wheels (which is "dist") + example_lock_spec = add_wheels_to_spec( + example_lock_spec, + test_wheel_list[0:3], + base_url="http://www.nowhere.org/", + base_path=test_wheel_list[0].parent.parent, + ) + # py_one, needs_one and needs_one_opt 
should get added + assert "py-one" in example_lock_spec.packages + assert "needs-one" in example_lock_spec.packages + assert "needs-one-opt" in example_lock_spec.packages + assert example_lock_spec.packages["needs-one-opt"].file_name.startswith( + "http://www.nowhere.org/dist/nEEds" + ) + + +# all requirements markers should not be needed, so dependencies should be empty +def test_markers_not_needed(test_wheel_list, example_lock_spec): + example_lock_spec = add_wheels_to_spec(example_lock_spec, test_wheel_list[5:6]) + assert example_lock_spec.packages["markers-not-needed-test"].depends == [] + + +# all requirements markers should be needed, +# so returned dependencies should be the same length as marker_examples_needed +def test_markers_needed(test_wheel_list, example_lock_spec, marker_examples_needed): + example_lock_spec = add_wheels_to_spec( + example_lock_spec, test_wheel_list[6:7], ignore_missing_dependencies=True + ) + assert len(example_lock_spec.packages["markers-needed-test"].depends) == len( + marker_examples_needed + ) @pytest.mark.skipif(WHEEL is None, reason="wheel test requires a built wheel") -def test_self_wheel(): +def test_self_wheel(example_lock_spec): assert WHEEL is not None - - spec = PackageSpec.from_wheel(WHEEL).json(indent=2, sort_keys=True) + example_lock_spec = add_wheels_to_spec( + example_lock_spec, [WHEEL], ignore_missing_dependencies=True + ) expected = PackageSpec( name="pyodide-lock", @@ -59,42 +118,69 @@ def test_self_wheel(): depends=["pydantic"], unvendored_tests=False, shared_library=False, - ).json(indent=2, sort_keys=True) + ) - assert spec == expected + assert example_lock_spec.packages["pyodide-lock"] == expected -def test_not_wheel(tmp_path): - wheel = tmp_path / "not-a-wheel-1.0.0-py3-none-any.whl" +def test_not_wheel(tmp_path, example_lock_spec): + wheel = tmp_path / "not_a_wheel-1.0.0-py3-none-any.whl" with zipfile.ZipFile(wheel, "w") as whlzip: whlzip.writestr("README.md", data="Not a wheel") with pytest.raises(RuntimeError, match="metadata"): - PackageSpec.from_wheel(wheel) + example_lock_spec = add_wheels_to_spec(example_lock_spec, [wheel]) @pytest.mark.parametrize( - "requires_dist,depends", + "bad_name", [ - *PEP_0508_EXAMPLES.items(), - *MARKER_EXAMPLES.items(), - *NOT_MARKER_EXAMPLES.items(), - *NUM_MARKER_EXAMPLES.items(), - *NOT_NUM_MARKER_EXAMPLES.items(), - # normalized names - (("PyYAML",), ["pyyaml"]), - (("pyyaml",), ["pyyaml"]), - (("pyyaml", "PyYAML"), ["pyyaml"]), - (("ruamel-yaml",), ["ruamel-yaml"]), - (("ruamel.yaml",), ["ruamel-yaml"]), - (("ruamel.yaml", "ruamel-yaml"), ["ruamel-yaml"]), - (("ruamel.yaml.jinja2",), ["ruamel-yaml-jinja2"]), + "bad-filename-for-a-wheel-1.0.0-py3-none-any.whl", + "bad_version_for_a_wheel-a.0.0-py3-none-any.whl", ], ) -def test_wheel_depends(requires_dist: tuple[str], depends: list[str]) -> None: - metadata = pkginfo.Distribution() - metadata.name = "foo" - metadata.requires_dist = requires_dist - assert ( - _wheel_depends(metadata) == depends - ), f"{requires_dist} does not yield {depends}" +def test_bad_names(tmp_path, bad_name, example_lock_spec): + wheel = tmp_path / bad_name + with zipfile.ZipFile(wheel, "w") as whlzip: + whlzip.writestr("README.md", data="Not a wheel") + with pytest.raises(RuntimeError, match="Wheel filename"): + example_lock_spec = add_wheels_to_spec(example_lock_spec, [wheel]) + + +def test_wheel_compatibility_checking(example_lock_spec): + target_python = version_parse(example_lock_spec.info.python) + python_tag = f"py{target_python.major}{target_python.minor}" + 
cpython_tag = f"cp{target_python.major}{target_python.minor}" + emscripten_tag = example_lock_spec.info.platform + "_" + example_lock_spec.info.arch + + # pure python 3 wheel + _check_wheel_compatible( + Path("test_wheel-1.0.0-py3-none-any.whl"), example_lock_spec.info + ) + # pure python 3.X wheel + _check_wheel_compatible( + Path(f"test_wheel-1.0.0-{python_tag}-none-any.whl"), example_lock_spec.info + ) + # pure python 2 or 3 wheel + _check_wheel_compatible( + Path("test_wheel-1.0.0-py2.py3-none-any.whl"), example_lock_spec.info + ) + # cpython emscripten correct version + _check_wheel_compatible( + Path(f"test_wheel-1.0.0-{cpython_tag}-{cpython_tag}-{emscripten_tag}.whl"), + example_lock_spec.info, + ) + with pytest.raises(RuntimeError): + # cpython emscripten incorrect version + _check_wheel_compatible( + Path( + f"test_wheel-1.0.0-{cpython_tag}-{cpython_tag}-emscripten_3_1_2_wasm32.whl" + ), + example_lock_spec.info, + ) + with pytest.raises(RuntimeError): + # a linux wheel + _check_wheel_compatible( + Path(f"test_wheel-1.0.0-{cpython_tag}-{cpython_tag}-linux_x86_64.whl"), + example_lock_spec.info, + )
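
A minimal usage sketch of the Python API introduced above, for reviewers. The lockfile paths, wheel filename, and base URL are illustrative placeholders, not values taken from this diff:

    # Usage sketch (assumes a built wheel and an existing pyodide-lock.json).
    from pathlib import Path

    from pyodide_lock import PyodideLockSpec
    from pyodide_lock.utils import add_wheels_to_spec

    # Load the lockfile shipped with the Pyodide distribution being targeted.
    spec = PyodideLockSpec.from_json(Path("pyodide-lock.json"))

    # Add a locally built wheel: its file name is rewritten relative to the
    # wheel's directory, prefixed with base_url, and its dependencies are
    # resolved against the packages already in the lockfile.
    new_spec = add_wheels_to_spec(
        spec,
        [Path("dist/example_pkg-1.0.0-py3-none-any.whl")],
        base_url="https://example.org/wheels/",
    )

    new_spec.to_json(Path("pyodide-lock-new.json"))

The add-wheels command in pyodide_lock/cli.py wraps this same call, and is registered under the "pyodide.cli" entry-point group in pyproject.toml.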