BUG: fetch_string_and_headers compat: raise in and out of pyodide #129

Open · wants to merge 1 commit into base: main
3 changes: 3 additions & 0 deletions micropip/_compat/__init__.py
@@ -34,6 +34,8 @@

to_js = compatibility_layer.to_js

HttpStatusError = compatibility_layer.HttpStatusError


__all__ = [
    "REPODATA_INFO",
@@ -45,4 +47,5 @@
    "loadPackage",
    "get_dynlibs",
    "to_js",
    "HttpStatusError",
]
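
With `HttpStatusError` re-exported from `micropip._compat`, callers can handle index fetch failures without importing Pyodide-specific exception types. A minimal sketch of the intended call pattern; the coroutine name and URL handling below are illustrative only, not part of this diff:

```python
# Illustrative sketch only: `fetch_project_page` is a made-up helper.
from micropip._compat import HttpStatusError, fetch_string_and_headers


async def fetch_project_page(url: str) -> str | None:
    try:
        content, _headers = await fetch_string_and_headers(url, {})
    except HttpStatusError as e:
        # A 404 just means the package is not on this index.
        if e.status_code == 404:
            return None
        raise
    return content
```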
29 changes: 27 additions & 2 deletions micropip/_compat/_compat_in_pyodide.py
@@ -5,9 +5,21 @@
if TYPE_CHECKING:
    pass

import pyodide
from packaging.version import parse
from pyodide._package_loader import get_dynlibs
from pyodide.ffi import IN_BROWSER, to_js
from pyodide.http import pyfetch

if parse(pyodide.__version__) > parse("0.27"):
    from pyodide.http import HttpStatusError, pyfetch
else:

    class HttpStatusError(Exception):  # type: ignore [no-redef]
        """We only need this name to be defined here; it is never raised on older Pyodide."""

        pass

    from pyodide.http import pyfetch

from .compatibility_layer import CompatibilityLayer

@@ -28,6 +40,15 @@


class CompatibilityInPyodide(CompatibilityLayer):
    class HttpStatusError(Exception):
        status_code: int
        message: str

        def __init__(self, status_code: int, message: str):
            self.status_code = status_code
            self.message = message
            super().__init__(message)

    @staticmethod
    def repodata_info() -> dict[str, str]:
        return REPODATA_INFO
@@ -50,7 +71,11 @@ async def fetch_bytes(url: str, kwargs: dict[str, str]) -> bytes:
    async def fetch_string_and_headers(
        url: str, kwargs: dict[str, str]
    ) -> tuple[str, dict[str, str]]:
        response = await pyfetch(url, **kwargs)
        try:
            response = await pyfetch(url, **kwargs)
            response.raise_for_status()
        except HttpStatusError as e:
            raise CompatibilityInPyodide.HttpStatusError(e.status, str(e)) from e

        content = await response.string()
        headers: dict[str, str] = response.headers
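
The import gate above keys off the running Pyodide version. A small self-contained sketch of the same check; `has_http_status_error` is an illustrative helper name, not part of the diff:

```python
# Sketch of the version gate used in _compat_in_pyodide.py above.
from packaging.version import parse


def has_http_status_error(pyodide_version: str) -> bool:
    # Mirrors the diff: only Pyodide releases newer than 0.27 are assumed
    # to ship pyodide.http.HttpStatusError.
    return parse(pyodide_version) > parse("0.27")


assert has_http_status_error("0.27.1")
assert not has_http_status_error("0.26.4")
```

Note that, as written, `parse("0.27.0") == parse("0.27")`, so a hypothetical 0.27.0 release itself would still take the fallback branch.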
16 changes: 15 additions & 1 deletion micropip/_compat/_compat_not_in_pyodide.py
@@ -1,6 +1,7 @@
import re
from pathlib import Path
from typing import IO, TYPE_CHECKING, Any
from urllib.error import HTTPError
from urllib.request import Request, urlopen
from urllib.response import addinfourl

@@ -15,6 +16,15 @@ class CompatibilityNotInPyodide(CompatibilityLayer):
    # Vendored from packaging
    _canonicalize_regex = re.compile(r"[-_.]+")

    class HttpStatusError(Exception):
        status_code: int
        message: str

        def __init__(self, status_code: int, message: str):
            self.status_code = status_code
            self.message = message
            super().__init__(message)

    class loadedPackages(CompatibilityLayer.loadedPackages):
        @staticmethod
        def to_py():
@@ -40,7 +50,11 @@ async def fetch_bytes(url: str, kwargs: dict[str, Any]) -> bytes:
    async def fetch_string_and_headers(
        url: str, kwargs: dict[str, Any]
    ) -> tuple[str, dict[str, str]]:
        response = CompatibilityNotInPyodide._fetch(url, kwargs=kwargs)
        try:
            response = CompatibilityNotInPyodide._fetch(url, kwargs=kwargs)
        except HTTPError as e:
            raise CompatibilityNotInPyodide.HttpStatusError(e.code, str(e)) from e

        headers = {k.lower(): v for k, v in response.headers.items()}
        return response.read().decode(), headers

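
Outside Pyodide, the change above wraps urllib's `HTTPError` so callers only ever see the compat-layer exception. A standalone sketch of that mapping; the `HttpStatusError` class and `fetch_text` helper below are local stand-ins, not the classes from the diff:

```python
# Standalone sketch of the urllib -> HttpStatusError mapping shown above.
from urllib.error import HTTPError
from urllib.request import Request, urlopen


class HttpStatusError(Exception):
    def __init__(self, status_code: int, message: str):
        self.status_code = status_code
        self.message = message
        super().__init__(message)


def fetch_text(url: str) -> str:
    try:
        with urlopen(Request(url)) as response:
            return response.read().decode()
    except HTTPError as e:
        # urllib raises HTTPError for non-2xx statuses; re-wrap it so the
        # caller can branch on status_code without importing urllib.
        raise HttpStatusError(e.code, str(e)) from e
```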
8 changes: 8 additions & 0 deletions micropip/_compat/compatibility_layer.py
@@ -13,6 +13,14 @@ class CompatibilityLayer(ABC):
    All of the following methods / properties must be implemented for use both inside and outside of pyodide.
    """

    class HttpStatusError(ABC, Exception):
        status_code: int
        message: str

        @abstractmethod
        def __init__(self, status_code: int, message: str):
            pass

    class loadedPackages(ABC):
        @staticmethod
        @abstractmethod
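
Declaring `HttpStatusError` on the abstract layer lets generic code name the exception type through whichever layer is active. A hedged sketch of that usage, assuming `fetch_string_and_headers` stays a static method on the concrete layers; `fetch_or_none` and `layer` are illustrative names:

```python
# Illustrative sketch: handling errors via the layer's nested exception class.
# `layer` would be CompatibilityInPyodide or CompatibilityNotInPyodide.
from micropip._compat.compatibility_layer import CompatibilityLayer


async def fetch_or_none(layer: type[CompatibilityLayer], url: str) -> str | None:
    try:
        content, _headers = await layer.fetch_string_and_headers(url, {})
    except layer.HttpStatusError as e:
        if e.status_code == 404:
            return None
        raise
    return content
```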
28 changes: 25 additions & 3 deletions micropip/package_index.py
@@ -10,7 +10,7 @@
from packaging.utils import InvalidWheelFilename
from packaging.version import InvalidVersion, Version

from ._compat import fetch_string_and_headers
from ._compat import HttpStatusError, fetch_string_and_headers
from ._utils import is_package_compatible, parse_version
from .externals.mousebender.simple import from_project_details_html
from .wheelinfo import WheelInfo
@@ -276,11 +276,33 @@ async def query_package(

        try:
            metadata, headers = await fetch_string_and_headers(url, _fetch_kwargs)
        except HttpStatusError as e:
            if e.status_code == 404:
                continue
            raise
        except OSError:
            continue
            # Temporary Pyodide compatibility shim:
            # PyPI now sets proper CORS headers on 404 responses (https://github.com/pypi/warehouse/pull/16339),
            # but stable Pyodide (<0.27) does not yet expose a proper HttpStatusError exception.
            # So on Pyodide 0.26.x we ignore the OSError here; once support for 0.26 is dropped,
            # this whole OSError except clause can be removed.
            try:
                import pyodide
                from packaging.version import parse

                if parse(pyodide.__version__) > parse("0.27"):
                    # Re-raise on more recent Pyodide.
                    raise
                continue
            except ImportError:
                # Not running in Pyodide.
                raise

        content_type = headers.get("content-type", "").lower()
        parser = _select_parser(content_type, name)
        try:
            parser = _select_parser(content_type, name)
        except ValueError as e:
            raise ValueError(f"Error trying to decode url: {url}") from e
        return parser(metadata)
    else:
        raise ValueError(
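
The query loop above now distinguishes "package missing from this index" (HTTP 404: try the next index) from other failures (re-raise). A simplified sketch of that control flow; the function name and the URL templating are illustrative, not the exact micropip code:

```python
# Simplified sketch of the 404-fallthrough behaviour added above.
from micropip._compat import HttpStatusError, fetch_string_and_headers


async def first_matching_index(name: str, index_urls: list[str]) -> str:
    for template in index_urls:
        url = template.format(package_name=name)
        try:
            metadata, _headers = await fetch_string_and_headers(url, {})
        except HttpStatusError as e:
            if e.status_code == 404:
                # Not on this index; fall through to the next one.
                continue
            raise
        return metadata
    raise ValueError(f"Can't fetch metadata for {name!r} from any index")
```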
5 changes: 5 additions & 0 deletions tests/conftest.py
@@ -340,6 +340,7 @@ def mock_fetch(monkeypatch, mock_importlib):
def _mock_package_index_gen(
    httpserver,
    pkgs=("black", "pytest", "numpy", "pytz", "snowballstemmer"),
    pkgs_not_found=(),
    content_type="application/json",
    suffix="_json.json.gz",
):
@@ -355,6 +356,10 @@ def _mock_package_index_gen(
            content_type=content_type,
            headers={"Access-Control-Allow-Origin": "*"},
        )
    for pkg in pkgs_not_found:
        httpserver.expect_request(f"/{base}/{pkg}/").respond_with_data(
            "Not found", status=404, content_type="text/plain"
        )

    index_url = httpserver.url_for(base)

37 changes: 37 additions & 0 deletions tests/test_compat.py
@@ -0,0 +1,37 @@
"""
Test that the compat-layer functions behave the same inside and outside of Pyodide.
"""

import pytest
from pytest_pyodide import run_in_pyodide


@pytest.mark.driver_timeout(10)
def test_404(selenium_standalone_micropip, httpserver, request):
    selenium_standalone_micropip.set_script_timeout(11)

    @run_in_pyodide(packages=["micropip", "packaging"])
    async def _inner_test_404_raise(selenium, url):
        import pyodide
        import pytest
        from packaging.version import parse

        from micropip._compat import HttpStatusError, fetch_string_and_headers

        if parse(pyodide.__version__) > parse("0.27"):
            ExpectedErrorClass = HttpStatusError
        else:
            ExpectedErrorClass = OSError

        with pytest.raises(ExpectedErrorClass):
            await fetch_string_and_headers(url, {})

    httpserver.expect_request("/404").respond_with_data(
        "Not found",
        status=404,
        content_type="text/plain",
        headers={"Access-Control-Allow-Origin": "*"},
    )
    url_404 = httpserver.url_for("/404")
    _inner_test_404_raise(selenium_standalone_micropip, url_404)
4 changes: 2 additions & 2 deletions tests/test_package_index.py
@@ -135,8 +135,8 @@ def test_contain_placeholder():
)
async def test_query_package(mock_fixture, pkg1, pkg2, request):
    gen_mock_server = request.getfixturevalue(mock_fixture)
    pkg1_index_url = gen_mock_server(pkgs=[pkg1])
    pkg2_index_url = gen_mock_server(pkgs=[pkg2])
    pkg1_index_url = gen_mock_server(pkgs=[pkg1], pkgs_not_found=[pkg2])
    pkg2_index_url = gen_mock_server(pkgs=[pkg2], pkgs_not_found=[pkg1])

    for _index_urls in (
        pkg1_index_url,