mirror of https://github.com/miurahr/aqtinstall.git, synced 2025-12-16 20:27:05 +03:00
fix: delete combination check
Signed-off-by: Hiroshi Miura <miurahr@linux.com>
.github/workflows/check-combinations.yml (vendored): 59 deletions
@@ -1,59 +0,0 @@
name: "Check combinations.json"
on:
  schedule:
    # Run at midnight on the first of every month
    # https://crontab.guru/once-a-month
    - cron: "0 0 1 * *"

  workflow_dispatch:
  push:
    paths:
      - 'ci/**'

jobs:
  check_combinations:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 20

      - name: Update the "update-combinations" branch (ff merge to master)
        uses: MaximeHeckel/github-action-merge-fast-forward@b4e9b28dce30a682e5fbe3135be4053ea2a75e15
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          branchtomerge: "origin/master"
          branch: "update-combinations"

      - name: Set up Python 3.9
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'

      - name: Build and install
        run: |
          python -m pip install ./ --user

      - name: Install generate_combinations dependencies
        run: pip install -U jsoncomparison

      - name: Check combinations.json
        run: PYTHONPATH=$(pwd) python3 ci/generate_combinations.py --write --no-tqdm

      - name: Commit and make pull request
        uses: gr2m/create-or-update-pull-request-action@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          title: "Update `aqt/combinations.json`"
          body: |
            The `ci/generate_combinations.py` script has detected changes to the repo at https://download.qt.io.
            This PR will update `aqt/combinations.json` to account for those changes.

            Posted from [the `check_combinations` action](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})

          branch: "update-combinations"
          path: "aqt/combinations.json"
          commit-message: "Update aqt/combinations.json"
          author: "Qt Repo Watchbot <qtrepowatchbot[bot]@users.noreply.github.com>"
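Note: the job above regenerated the combinations data and compared it to the checked-in aqt/combinations.json using the jsoncomparison package it installed. A minimal sketch of that comparison step, using hypothetical data rather than the real file:

# Sketch of the comparison performed by the deleted job (hypothetical data).
import json
from jsoncomparison import NO_DIFF, Compare

expect = json.loads('[{"versions": ["6.5.3"], "new_archive": []}]')  # checked-in file
actual = [{"versions": ["6.5.3", "6.6.0"], "new_archive": []}]       # freshly generated data
diff = Compare().check(expect, actual)
print("up to date" if diff == NO_DIFF else diff)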
@@ -359,11 +359,6 @@ class SettingsClass:
        self.loggingconf = os.path.join(os.path.dirname(__file__), "logging.ini")

    def load_settings(self, file: Optional[Union[str, TextIO]] = None) -> None:
        with open(
            os.path.join(os.path.dirname(__file__), "combinations.json"),
            "r",
        ) as j:
            self._combinations = json.load(j)[0]
        if file is not None:
            if isinstance(file, str):
                result = self.config.read(file)
@@ -293,11 +293,6 @@ class Cli:
            return False
        return True

    def _select_unexpected_modules(self, qt_version: str, modules: Optional[List[str]]) -> List[str]:
        """Returns a sorted list of all the requested modules that do not exist in the combinations.json file."""
        available = Settings.available_modules(qt_version)
        return sorted(set(modules or []) - set(available or []))

    @staticmethod
    def _determine_qt_version(
        qt_version_or_spec: str, host: str, target: str, arch: str, base_url: str = Settings.baseurl
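Note: the _select_unexpected_modules helper removed here is a plain set difference between the requested modules and the modules known from combinations.json. A standalone sketch with hypothetical module names:

# Standalone sketch of the removed check (hypothetical module names).
from typing import List, Optional

def select_unexpected_modules(modules: Optional[List[str]], available: Optional[List[str]]) -> List[str]:
    # Requested modules missing from the known list, returned sorted.
    return sorted(set(modules or []) - set(available or []))

print(select_unexpected_modules(["qtcharts", "qtfoo"], ["qtcharts", "qtquick3d"]))  # ['qtfoo']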
@@ -422,10 +417,6 @@ class Cli:
        if not self._check_qt_arg_combination(qt_version, os_name, target, arch):
            self.logger.warning(self._warning_unknown_target_arch_combo([os_name, target, arch]))
        all_extra = True if modules is not None and "all" in modules else False
        if not all_extra:
            unexpected_modules = self._select_unexpected_modules(qt_version, modules)
            if unexpected_modules:
                self.logger.warning(self._warning_unexpected_modules(unexpected_modules))

        qt_archives: QtArchives = retry_on_bad_connection(
            lambda base_url: QtArchives(
@@ -1,216 +0,0 @@
#!/usr/bin/env python3

import argparse
import json
import logging
import sys
from pathlib import Path
from typing import Dict, Generator, Iterable, List, Optional, Tuple, Union, NamedTuple

from jsoncomparison import NO_DIFF, Compare

from aqt.exceptions import ArchiveConnectionError, ArchiveDownloadError
from aqt.helper import Settings, setup_logging
from aqt.metadata import ArchiveId, MetadataFactory, Versions


def is_blacklisted_tool(tool_name: str) -> bool:
    for prefix in ("tools_qt3dstudio_",):
        if tool_name.startswith(prefix):
            return True
    for suffix in ("_preview", "_early_access"):
        if tool_name.endswith(suffix):
            return True
    return False


def iter_archive_ids(
    *,
    category: str,
    hosts: Iterable[str] = ArchiveId.HOSTS,
    targets: Optional[Iterable[str]] = None,
) -> Generator[ArchiveId, None, None]:
    for host in sorted(hosts):
        use_targets = targets
        if use_targets is None:
            use_targets = ArchiveId.TARGETS_FOR_HOST[host]
        for target in use_targets:
            yield ArchiveId(category, host, target)


def iter_arches() -> Generator[dict, None, None]:
    logger.info("Fetching arches")
    archive_ids = list(iter_archive_ids(category="qt"))
    for archive_id in tqdm(archive_ids):
        for version in ("latest", "6.5.3", "6.2.4", "5.15.2", "5.13.2", "5.9.9"):
            if archive_id.target == "winrt" and (version == "latest" or version.startswith("6")):
                continue
            for arch_name in MetadataFactory(archive_id, architectures_ver=version).getList():
                yield {
                    "os_name": archive_id.host,
                    "target": archive_id.target,
                    "arch": arch_name,
                }


def iter_tool_variants() -> Generator[dict, None, None]:
    for archive_id in iter_archive_ids(category="tools"):
        logger.info("Fetching tool variants for {}".format(archive_id))
        for tool_name in tqdm(sorted(MetadataFactory(archive_id).fetch_tools())):
            if is_blacklisted_tool(tool_name):
                continue
            for tool_variant in MetadataFactory(
                archive_id, tool_name=tool_name
            ).getList():
                yield {
                    "os_name": archive_id.host,
                    "target": archive_id.target,
                    "tool_name": tool_name,
                    "arch": tool_variant,
                }


def iter_qt_minor_groups(
    host: str = "linux", target: str = "desktop"
) -> Generator[Tuple[int, int], None, None]:
    versions: Versions = MetadataFactory(ArchiveId("qt", host, target)).fetch_versions()
    for minor_group in versions:
        v = minor_group[0]
        yield v.major, v.minor


def iter_modules_for_qt_minor_groups(
    host: str = "linux", target: str = "desktop", arch: str = "gcc_64"
) -> Generator[Dict, None, None]:
    logger.info("Fetching qt modules for {}/{}".format(host, target))
    for major, minor in tqdm(list(iter_qt_minor_groups(host, target))):
        use_linux_gcc = (host == "linux" and arch == "gcc_64" and major == 6 and minor >= 7)
        use_arch = "linux_gcc_64" if use_linux_gcc else arch
        yield {
            "qt_version": f"{major}.{minor}",
            "modules": MetadataFactory(
                ArchiveId("qt", host, target), modules_query=MetadataFactory.ModulesQuery(f"{major}.{minor}.0", use_arch)
            ).getList(),
        }


def list_qt_versions(host: str = "linux", target: str = "desktop") -> List[str]:
    all_versions = list()
    versions: Versions = MetadataFactory(ArchiveId("qt", host, target)).getList()
    for minor_group in versions:
        all_versions.extend([str(ver) for ver in minor_group])
    return all_versions


def merge_records(arch_records) -> List[Dict]:
    all_records: List[Dict] = []
    hashes = set()
    for record in arch_records:
        _hash = record["os_name"], record["target"], record["arch"]
        if _hash not in hashes:
            all_records.append(record)
            hashes.add(_hash)
    for sorting_key in ("arch", "target", "os_name"):
        all_records = sorted(all_records, key=lambda d: d[sorting_key])
    return all_records


def generate_combos(new_archive: List[str]):
    return {
        "qt": merge_records(iter_arches()),
        "tools": list(iter_tool_variants()),
        "modules": list(iter_modules_for_qt_minor_groups()),
        "versions": list_qt_versions(),
        "new_archive": new_archive,
    }


def alphabetize_modules(combos: Dict[str, Union[List[Dict], List[str]]]):
    for i, item in enumerate(combos["modules"]):
        combos["modules"][i]["modules"] = sorted(item["modules"])


def write_combinations_json(
    combos: List[Dict[str, Union[List[Dict], List[str]]]],
    filename: Path,
):
    logger.info(f"Write file {filename}")
    json_text = json.dumps(combos, sort_keys=True, indent=2)
    if filename.write_text(json_text, encoding="utf_8") == 0:
        raise RuntimeError("Failed to write file!")


def main(filename: Path, is_write_file: bool, is_verbose: bool) -> int:
    try:
        expect = json.loads(filename.read_text())
        alphabetize_modules(expect[0])
        actual = [generate_combos(new_archive=expect[0]["new_archive"])]
        diff = Compare().check(expect, actual)

        if is_verbose:
            logger.info("=" * 80)
            logger.info("Program Output:")
            logger.info(json.dumps(actual, sort_keys=True, indent=2))

            logger.info("=" * 80)
            logger.info(f"Comparison with existing '{filename}':")
            logger.info(json.dumps(diff, sort_keys=True, indent=2))
            logger.info("=" * 80)

        if diff == NO_DIFF:
            logger.info(f"{filename} is up to date! No PR is necessary this time!")
            return 0  # no difference
        if is_write_file:
            logger.info(f"{filename} has changed; writing changes to file...")
            write_combinations_json(actual, filename)
            return 0  # File written successfully
        logger.warning(f"{filename} is out of date, but no changes were written")
        return 1  # difference reported

    except (ArchiveConnectionError, ArchiveDownloadError) as e:
        logger.error(format(e))
        return 1


def get_tqdm(disable: bool):
    if disable:
        return lambda x: x

    from tqdm import tqdm as base_tqdm

    return lambda *a: base_tqdm(*a, disable=disable)


if __name__ == "__main__":
    Settings.load_settings()
    setup_logging()
    logger = logging.getLogger("aqt.generate_combos")

    json_filename = Path(__file__).parent.parent / "aqt/combinations.json"

    parser = argparse.ArgumentParser(
        description="Generate combinations.json from download.qt.io, "
        "compare with existing file, and write file to correct differences"
    )
    parser.add_argument(
        "--write",
        help="write to combinations.json if changes detected",
        action="store_true",
    )
    parser.add_argument(
        "--no-tqdm",
        help="disable progress bars (makes CI logs easier to read)",
        action="store_true",
    )
    parser.add_argument(
        "--verbose",
        help="Print a json dump of the new file, and an abbreviated diff with the old file",
        action="store_true",
    )
    args = parser.parse_args()

    tqdm = get_tqdm(args.no_tqdm)

    sys.exit(
        main(filename=json_filename, is_write_file=args.write, is_verbose=args.verbose)
    )
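Note: in the deleted script, merge_records deduplicates on the (os_name, target, arch) tuple and then applies successive stable sorts, so the final ordering is by os_name, then target, then arch. A short sketch with made-up records:

# Sketch of the merge_records dedup/sort behaviour (made-up records).
records = [
    {"os_name": "windows", "target": "desktop", "arch": "win64_msvc2019_64"},
    {"os_name": "linux", "target": "desktop", "arch": "gcc_64"},
    {"os_name": "linux", "target": "desktop", "arch": "gcc_64"},  # duplicate, dropped
]
seen, merged = set(), []
for r in records:
    key = (r["os_name"], r["target"], r["arch"])
    if key not in seen:
        merged.append(r)
        seen.add(key)
for k in ("arch", "target", "os_name"):  # stable sorts: the last key becomes the primary one
    merged = sorted(merged, key=lambda d: d[k])
print(merged)  # linux/gcc_64 first, then windows/win64_msvc2019_64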
@@ -24,7 +24,7 @@ if __name__ == "__main__":
        adddata_arg = "{src:s};aqt"
    else:
        adddata_arg = "{src:s}:aqt"
    for data in ["aqt/logging.ini", "aqt/settings.ini", "aqt/combinations.json"]:
    for data in ["aqt/logging.ini", "aqt/settings.ini"]:
        args.append('--add-data')
        args.append(adddata_arg.format(src=data))
    args.append(os.path.join(tools_dir, "launch_aqt.py"))
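Note: the separator in the --add-data argument above is platform-dependent; PyInstaller expects 'SRC;DEST' on Windows and 'SRC:DEST' elsewhere. A minimal sketch of building such an argument (hypothetical data file):

# Sketch: platform-dependent PyInstaller --add-data argument (hypothetical data file).
import sys

sep = ";" if sys.platform.startswith("win") else ":"
args = ["--add-data", f"aqt/settings.ini{sep}aqt"]
print(args)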