Allow MetadataFactory.fetch_http to skip sha256

`MetadataFactory.fetch_http` often has to download HTML directory pages
rather than Updates.xml files. download.qt.io does not store checksums
for these pages, so the function needs a way to fetch them without
sha256 verification.
Author: David Dalcino
Date: 2022-03-06 18:16:37 -08:00
Commit: b62db9ee2a (parent: 12d20a3d06)

3 changed files with 11 additions and 11 deletions
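
In short, call sites that fetch HTML directory listings now pass False as a second argument, while Updates.xml call sites keep the default and stay checksum-verified. A minimal sketch of the new call pattern (the stub class and example paths below are mine, not aqtinstall's code; the real implementation is in the hunks that follow):

    class MetadataFactoryStub:
        # Stand-in with the same signature as the patched MetadataFactory.fetch_http.
        @staticmethod
        def fetch_http(rest_of_url: str, is_check_hash: bool = True) -> str:
            # The real method looks up and verifies a sha256 only when is_check_hash is True.
            return f"<body of {rest_of_url!r}, verified={is_check_hash}>"

    # Updates.xml files have published checksums, so the default (verify) is kept:
    xml_text = MetadataFactoryStub.fetch_http("online/qtsdkrepository/mac_x64/desktop/qt6_620/Updates.xml")

    # HTML directory listings have no checksum on download.qt.io, so callers such as
    # fetch_tools and fetch_extensions now pass False:
    html_text = MetadataFactoryStub.fetch_http("online/qtsdkrepository/mac_x64/desktop/", False)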


@@ -452,7 +452,7 @@ class MetadataFactory:
     def fetch_extensions(self, version: Version) -> List[str]:
         versions_extensions = MetadataFactory.get_versions_extensions(
-            self.fetch_http(self.archive_id.to_url()), self.archive_id.category
+            self.fetch_http(self.archive_id.to_url(), False), self.archive_id.category
         )
         filtered = filter(
             lambda ver_ext: ver_ext[0] == version and ver_ext[1],
@@ -469,7 +469,7 @@ class MetadataFactory:
             return ver_ext[0]

         versions_extensions = MetadataFactory.get_versions_extensions(
-            self.fetch_http(self.archive_id.to_url()), self.archive_id.category
+            self.fetch_http(self.archive_id.to_url(), False), self.archive_id.category
         )
         versions = sorted(filter(None, map(get_version, filter(filter_by, versions_extensions))))
         iterables = itertools.groupby(versions, lambda version: version.minor)
@@ -479,7 +479,7 @@ class MetadataFactory:
         return self.fetch_versions().latest()

     def fetch_tools(self) -> List[str]:
-        html_doc = self.fetch_http(self.archive_id.to_url())
+        html_doc = self.fetch_http(self.archive_id.to_url(), False)
         return list(MetadataFactory.iterate_folders(html_doc, "tools"))

     def fetch_tool_modules(self, tool_name: str) -> List[str]:
@@ -572,9 +572,9 @@ class MetadataFactory:
         return version

     @staticmethod
-    def fetch_http(rest_of_url: str) -> str:
+    def fetch_http(rest_of_url: str, is_check_hash: bool = True) -> str:
         timeout = (Settings.connection_timeout, Settings.response_timeout)
-        expected_hash = binascii.unhexlify(get_hash(rest_of_url, "sha256", timeout))
+        expected_hash = binascii.unhexlify(get_hash(rest_of_url, "sha256", timeout)) if is_check_hash else None
         base_urls = Settings.baseurl, random.choice(Settings.fallbacks)
         for i, base_url in enumerate(base_urls):
             try:
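
The hunk ends before showing how expected_hash is consumed, but the intent is that a None value disables verification further down. A hedged sketch of that guard, with my own helper name rather than aqtinstall's actual downloader:

    import hashlib
    from typing import Optional


    def verify_sha256(data: bytes, expected_hash: Optional[bytes]) -> bytes:
        # When is_check_hash was False there is no expected hash, so skip verification.
        if expected_hash is None:
            return data
        if hashlib.sha256(data).digest() != expected_hash:
            raise ValueError("downloaded file does not match its expected sha256 checksum")
        return data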


@@ -96,7 +96,7 @@ def test_cli_determine_qt_version(
     monkeypatch, host, target, arch, version_or_spec: str, expected_version: Version, is_bad_spec: bool
 ):
     _html = (Path(__file__).parent / "data" / f"{host}-{target}.html").read_text("utf-8")
-    monkeypatch.setattr(MetadataFactory, "fetch_http", lambda self, _: _html)
+    monkeypatch.setattr(MetadataFactory, "fetch_http", lambda *args, **kwargs: _html)
     cli = Cli()
     cli._setup_settings()
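
The widened lambda signature is what the new parameter requires: `fetch_http` may now be called with a third positional argument, which the old two-parameter `lambda self, _: _html` stub would reject with a TypeError. A small self-contained illustration (the class below is a stand-in; monkeypatch is pytest's built-in fixture):

    class FakeMetadataFactory:
        # Stand-in carrying the patched signature.
        def fetch_http(self, rest_of_url: str, is_check_hash: bool = True) -> str:
            raise AssertionError("tests should never reach the network")


    def test_stub_accepts_both_call_forms(monkeypatch):
        monkeypatch.setattr(FakeMetadataFactory, "fetch_http", lambda *args, **kwargs: "<html></html>")
        factory = FakeMetadataFactory()
        assert factory.fetch_http("some/path") == "<html></html>"         # old two-argument call
        assert factory.fetch_http("some/path", False) == "<html></html>"  # new call passing the flag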


@@ -132,7 +132,7 @@ def spec_regex():
 )
 def test_list_versions_tools(monkeypatch, spec_regex, os_name, target, in_file, expect_out_file):
     _html = (Path(__file__).parent / "data" / in_file).read_text("utf-8")
-    monkeypatch.setattr(MetadataFactory, "fetch_http", lambda self, _: _html)
+    monkeypatch.setattr(MetadataFactory, "fetch_http", lambda *args, **kwargs: _html)

     expected = json.loads((Path(__file__).parent / "data" / expect_out_file).read_text("utf-8"))
@@ -434,7 +434,7 @@ def test_list_qt_cli(
     expect_set = expect
     assert isinstance(expect_set, set)

-    def _mock_fetch_http(_, rest_of_url: str) -> str:
+    def _mock_fetch_http(_, rest_of_url, *args, **kwargs: str) -> str:
         htmltext = (Path(__file__).parent / "data" / htmlfile).read_text("utf-8")
         if not rest_of_url.endswith("Updates.xml"):
             return htmltext
@@ -723,7 +723,7 @@ def test_list_describe_filters(meta: MetadataFactory, expect: str):
 )
 def test_list_to_version(monkeypatch, archive_id, spec, version_str, expect):
     _html = (Path(__file__).parent / "data" / "mac-desktop.html").read_text("utf-8")
-    monkeypatch.setattr(MetadataFactory, "fetch_http", lambda self, _: _html)
+    monkeypatch.setattr(MetadataFactory, "fetch_http", lambda *args, **kwargs: _html)

     if isinstance(expect, Exception):
         with pytest.raises(CliInputError) as error:
@@ -847,7 +847,7 @@ def test_show_list_versions(monkeypatch, capsys):
 def test_show_list_tools(monkeypatch, capsys):
     page = (Path(__file__).parent / "data" / "mac-desktop.html").read_text("utf-8")
-    monkeypatch.setattr(MetadataFactory, "fetch_http", lambda self, _: page)
+    monkeypatch.setattr(MetadataFactory, "fetch_http", lambda *args, **kwargs: page)

     expect_file = Path(__file__).parent / "data" / "mac-desktop-expect.json"
     expect = "\n".join(json.loads(expect_file.read_text("utf-8"))["tools"]) + "\n"
@@ -918,7 +918,7 @@ def test_list_tool_cli(monkeypatch, capsys, host: str, target: str, tool_name: s
     xml_data = json.loads(xmljson)
     expected_tool_modules = set(xml_data["modules"])

-    def _mock_fetch_http(_, rest_of_url: str) -> str:
+    def _mock_fetch_http(_, rest_of_url, *args, **kwargs: str) -> str:
         if not rest_of_url.endswith("Updates.xml"):
             return htmltext
         folder = urlparse(rest_of_url).path.split("/")[-2]