diff --git a/tests/test_archives.py b/tests/test_archives.py
index 8fbbf79..88300af 100644
--- a/tests/test_archives.py
+++ b/tests/test_archives.py
@@ -447,6 +447,7 @@ def test_archives_weird_module_7z_name(
     expect_archives: Set[str],
 ):
     monkeypatch.setattr("aqt.archives.getUrl", lambda *args: xml)
+    monkeypatch.setattr("aqt.archives.get_hash", lambda *args, **kwargs: hashlib.sha256(bytes(xml, "utf-8")).hexdigest())
     qt_archives = make_archives_fn(subarchives, modules, is_include_base)
     archives = {pkg.archive for pkg in qt_archives.archives}
diff --git a/tests/test_helper.py b/tests/test_helper.py
index e5d9621..f846f3e 100644
--- a/tests/test_helper.py
+++ b/tests/test_helper.py
@@ -331,3 +331,12 @@ def test_helper_getUrl_conn_error(monkeypatch):
         getUrl(url, timeout)
     assert e.type == ArchiveConnectionError
     assert expect_re.match(format(e.value))
+
+
+def test_helper_getUrl_checksum_error(monkeypatch):
+    mocked_get, mocked_session_get = mock_get_redirect(0)
+    monkeypatch.setattr(requests, "get", mocked_get)
+    monkeypatch.setattr(requests.Session, "get", mocked_session_get)
+    with pytest.raises(ArchiveChecksumError) as e:
+        getUrl("some_url", timeout=(5, 5), expected_hash=b"AAAAAAAAAAA")
+    assert e.type == ArchiveChecksumError
diff --git a/tests/test_install.py b/tests/test_install.py
index 06cd0ea..b14f335 100644
--- a/tests/test_install.py
+++ b/tests/test_install.py
@@ -1,3 +1,4 @@
+import hashlib
 import logging
 import os
 import re
@@ -134,11 +135,14 @@ def make_mock_geturl_download_archive(
     for _arc in archives:
         assert _arc.filename_7z.endswith(".7z")

+    xml = "\n{}\n".format("\n".join([archive.xml_package_update() for archive in archives]))
+
     def mock_getUrl(url: str, *args) -> str:
         if url.endswith(updates_url):
-            return "\n{}\n".format("\n".join([archive.xml_package_update() for archive in archives]))
+            return xml
         elif url.endswith(".sha256"):
-            return ""  # Skip the checksum
+            filename = url.split("/")[-1][: -len(".sha256")]
+            return f"{hashlib.sha256(bytes(xml, 'utf-8')).hexdigest()} {filename}"
         assert False

     def mock_download_archive(url: str, out: str, *args):
@@ -707,13 +711,16 @@ def test_install(
     ),
 )
 def test_install_nonexistent_archives(monkeypatch, capsys, cmd, xml_file: Optional[str], expected):
+    xml = (Path(__file__).parent / "data" / xml_file).read_text("utf-8") if xml_file else ""
+
     def mock_get_url(url, *args, **kwargs):
         if not xml_file:
             raise ArchiveDownloadError(f"Failed to retrieve file at {url}\nServer response code: 404, reason: Not Found")
-        return (Path(__file__).parent / "data" / xml_file).read_text("utf-8")
+        return xml

     monkeypatch.setattr("aqt.archives.getUrl", mock_get_url)
-    monkeypatch.setattr("aqt.helper.getUrl", mock_get_url)
+    monkeypatch.setattr("aqt.archives.get_hash", lambda *args, **kwargs: hashlib.sha256(bytes(xml, "utf-8")).hexdigest())
+    monkeypatch.setattr("aqt.metadata.get_hash", lambda *args, **kwargs: hashlib.sha256(bytes(xml, "utf-8")).hexdigest())
     monkeypatch.setattr("aqt.metadata.getUrl", mock_get_url)

     cli = Cli()
diff --git a/tests/test_list.py b/tests/test_list.py
index bdffee3..b4c24cd 100644
--- a/tests/test_list.py
+++ b/tests/test_list.py
@@ -1,3 +1,4 @@
+import hashlib
 import json
 import os
 import re
@@ -953,6 +954,7 @@ def test_list_tool_cli(monkeypatch, capsys, host: str, target: str, tool_name: s


 def test_fetch_http_ok(monkeypatch):
+    monkeypatch.setattr("aqt.metadata.get_hash", lambda *args, **kwargs: hashlib.sha256(b"some_html_content").hexdigest())
     monkeypatch.setattr("aqt.metadata.getUrl", lambda **kwargs: "some_html_content")
     assert MetadataFactory.fetch_http("some_url") == "some_html_content"
@@ -966,6 +968,7 @@ def test_fetch_http_failover(monkeypatch):
             raise ArchiveDownloadError()
         return "some_html_content"

+    monkeypatch.setattr("aqt.metadata.get_hash", lambda *args, **kwargs: hashlib.sha256(b"some_html_content").hexdigest())
     monkeypatch.setattr("aqt.metadata.getUrl", _mock)

     # Require that the first attempt failed, but the second did not
@@ -973,33 +976,19 @@ def test_fetch_http_failover(monkeypatch):
     assert len(urls_requested) == 2


-def test_fetch_http_download_error(monkeypatch):
+@pytest.mark.parametrize("exception_on_error", (ArchiveDownloadError, ArchiveConnectionError))
+def test_fetch_http_download_error(monkeypatch, exception_on_error):
     urls_requested = set()

     def _mock(url, **kwargs):
         urls_requested.add(url)
-        raise ArchiveDownloadError()
+        raise exception_on_error()

+    monkeypatch.setattr("aqt.metadata.get_hash", lambda *args, **kwargs: hashlib.sha256(b"some_html_content").hexdigest())
     monkeypatch.setattr("aqt.metadata.getUrl", _mock)
-    with pytest.raises(ArchiveDownloadError) as e:
+    with pytest.raises(exception_on_error) as e:
         MetadataFactory.fetch_http("some_url")
-    assert e.type == ArchiveDownloadError
-
-    # Require that a fallback url was tried
-    assert len(urls_requested) == 2
-
-
-def test_fetch_http_conn_error(monkeypatch):
-    urls_requested = set()
-
-    def _mock(url, **kwargs):
-        urls_requested.add(url)
-        raise ArchiveConnectionError()
-
-    monkeypatch.setattr("aqt.metadata.getUrl", _mock)
-    with pytest.raises(ArchiveConnectionError) as e:
-        MetadataFactory.fetch_http("some_url")
-    assert e.type == ArchiveConnectionError
+    assert e.type == exception_on_error

     # Require that a fallback url was tried
     assert len(urls_requested) == 2
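
For context, the new tests above exercise checksum verification around getUrl/get_hash. Below is a minimal, hypothetical sketch of that pattern: a download helper that hashes the response body with SHA-256 and raises when the digest does not match. The names fetch_with_checksum and ChecksumMismatchError are illustrative assumptions, not aqt's actual API; the real getUrl(url, timeout, expected_hash=...) and ArchiveChecksumError may differ in detail (for example, the tests pass expected_hash as bytes).

# Hypothetical sketch of the checksum-verification pattern exercised by the
# tests above. Not aqt's real implementation.
import hashlib
from typing import Optional

import requests


class ChecksumMismatchError(Exception):
    """Raised when a downloaded payload does not match its expected SHA-256 digest."""


def fetch_with_checksum(url: str, timeout=(5, 5), expected_hash: Optional[str] = None) -> str:
    # Download the resource; HTTP errors surface via raise_for_status().
    response = requests.get(url, timeout=timeout)
    response.raise_for_status()
    # If a digest was supplied, compare it against the SHA-256 of the body.
    if expected_hash is not None:
        actual = hashlib.sha256(response.content).hexdigest()
        if actual != expected_hash:
            raise ChecksumMismatchError(
                f"Downloaded file from {url} has checksum {actual}, expected {expected_hash}"
            )
    return response.text

A test of this sketch could monkeypatch requests.get to serve a fixed body and pass a deliberately wrong expected_hash, mirroring test_helper_getUrl_checksum_error above.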