diff --git a/docs/release-notes.rst b/docs/release-notes.rst index 3801930..fedd988 100644 --- a/docs/release-notes.rst +++ b/docs/release-notes.rst @@ -10,8 +10,15 @@ Release Notes * **New**: Filter oudated packages by version part (:bug:`4`). +* Use Pydantic models to load and serialize caches. This could improve + performance and correctness but may also introduce new bugs. + *Modules changelog:* +* **bugzilla**: + + * Switch to REST API from XMLRPC. + * **pgo**: * **outdated**: diff --git a/find_work/cache.py b/find_work/cache.py index 59fbe94..2bb3323 100644 --- a/find_work/cache.py +++ b/find_work/cache.py @@ -5,7 +5,6 @@ """ Implementation of caching functionality. """ import hashlib -import json import tempfile from pathlib import Path from typing import Any, SupportsBytes @@ -110,15 +109,14 @@ def _get_cache_path(cache_key: SupportsBytes) -> Path: return file.with_suffix(".json") -def write_json_cache(data: Any, cache_key: SupportsBytes, *, raw: bool = False, - **kwargs: Any) -> None: +def write_raw_json_cache(data: SupportsBytes, cache_key: SupportsBytes) -> None: """ - Write a JSON cache file in a temporary directory. Keyword arguments are - passed to :py:function:`json.dump` as is. + Write a JSON cache file in a temporary directory. - :param data: data to serialize + This function silently fails on OS errors. + + :param data: raw JSON :param cache_key: cache key object - :param raw: skip encoding and write raw data instead """ cache = _get_cache_path(cache_key) @@ -129,31 +127,23 @@ def write_json_cache(data: Any, cache_key: SupportsBytes, *, raw: bool = False, with open(cache, "wb") as file: try: - if raw: - cache.write_bytes(bytes(data)) - return - json.dump(data, file, **kwargs) + file.write(bytes(data)) except OSError: pass -def read_json_cache(cache_key: SupportsBytes, *, raw: bool = False, - **kwargs: Any) -> Any | None: +def read_raw_json_cache(cache_key: SupportsBytes) -> bytes: """ - Read a JSON cache file stored in a temporary directory. Keyword arguments - are passed to :py:function:`json.load` as is. + Read a JSON cache file stored in a temporary directory. :param cache_key: cache key object - :param raw: skip decoding and return raw file contents instead - :return: decoded data or ``None`` + :return: raw JSON file contents or empty byte string """ cache = _get_cache_path(cache_key) if not cache.is_file(): - return None + return b"" with open(cache, "rb") as file: - if raw: - return file.read() - return json.load(file, **kwargs) + return file.read() diff --git a/find_work/cli/bugzilla.py b/find_work/cli/bugzilla.py index 69fcf68..d73c0a0 100644 --- a/find_work/cli/bugzilla.py +++ b/find_work/cli/bugzilla.py @@ -4,32 +4,21 @@ """ CLI subcommands for everything Bugzilla. - -This Python module also defines some regular expressions.
- -``isodate_re`` matches ISO 8601 time/date strings: - ->>> isodate_re.fullmatch("2024") is None -True ->>> isodate_re.fullmatch("20090916T09:04:18") is None -False """ -import json -import re -import time import warnings -from collections.abc import Iterable +from collections.abc import Collection +from datetime import datetime from typing import Any -from xmlrpc.client import DateTime import click import gentoopm +import pydantic_core from tabulate import tabulate from find_work.cache import ( - read_json_cache, - write_json_cache, + read_raw_json_cache, + write_raw_json_cache, ) from find_work.cli import Message, Options, ProgressDots from find_work.constants import BUGZILLA_URL @@ -45,40 +34,24 @@ with warnings.catch_warnings(): import bugzilla from bugzilla.bug import Bug -isodate_re = re.compile(r"\d{4}\d{2}\d{2}T\d{2}:\d{2}:\d{2}") - -class BugEncoder(json.JSONEncoder): - def default(self, o: Any) -> Any: - if isinstance(o, DateTime): - return o.value - return json.JSONEncoder.default(self, o) - - -def as_datetime(obj: dict) -> dict: - result: dict = {} - for key, value in obj.items(): - # FIXME: every matching string will be converted to DateTime - if isinstance(value, str) and isodate_re.fullmatch(value): - result[key] = DateTime(value) - continue - result[key] = value - return result - - -def _bugs_from_json(data: list[dict]) -> list[Bug]: +def _bugs_from_raw_json(raw_json: str | bytes) -> list[Bug]: + data: list[dict] = pydantic_core.from_json(raw_json) with requests_session() as session: - bz = bugzilla.Bugzilla(BUGZILLA_URL, requests_session=session) + bz = bugzilla.Bugzilla(BUGZILLA_URL, requests_session=session, + force_rest=True) return [Bug(bz, dict=bug) for bug in data] -def _bugs_to_json(data: Iterable[Bug]) -> list[dict]: - return [bug.get_raw_data() for bug in data] +def _bugs_to_raw_json(data: Collection[Bug]) -> bytes: + raw_data = [bug.get_raw_data() for bug in data] + return pydantic_core.to_json(raw_data, exclude_none=True) def _fetch_bugs(options: Options, **kwargs: Any) -> list[Bug]: with requests_session() as session: - bz = bugzilla.Bugzilla(BUGZILLA_URL, requests_session=session) + bz = bugzilla.Bugzilla(BUGZILLA_URL, requests_session=session, + force_rest=True) query = bz.build_query( short_desc=options.bugzilla.short_desc or None, product=options.bugzilla.product or None, @@ -93,7 +66,7 @@ def _fetch_bugs(options: Options, **kwargs: Any) -> list[Bug]: return bz.query(query) -def _collect_bugs(data: Iterable[Bug], options: Options) -> list[BugView]: +def _collect_bugs(data: Collection[Bug], options: Options) -> list[BugView]: if options.only_installed: pm = gentoopm.get_package_manager() @@ -105,7 +78,7 @@ def _collect_bugs(data: Iterable[Bug], options: Options) -> list[BugView]: if package not in pm.installed: continue - date = time.strftime("%F", bug.last_change_time.timetuple()) + date = datetime.fromisoformat(bug.last_change_time).date().isoformat() item = BugView(bug.id, date, bug.assigned_to, bug.summary) result.append(item) return result @@ -117,12 +90,11 @@ def _list_bugs(cmd: str, options: Options, **filters: Any) -> None: options.say(Message.CACHE_LOAD) with dots(): - cached_data = read_json_cache(options.cache_key, - object_hook=as_datetime) - if cached_data is not None: + raw_data = read_raw_json_cache(options.cache_key) + if raw_data: options.say(Message.CACHE_READ) with dots(): - data = _bugs_from_json(cached_data) + data = _bugs_from_raw_json(raw_data) else: options.vecho("Fetching data from Bugzilla API", nl=False, err=True) with dots(): @@ 
-132,8 +104,8 @@ def _list_bugs(cmd: str, options: Options, **filters: Any) -> None: return options.say(Message.CACHE_WRITE) with dots(): - json_data = _bugs_to_json(data) - write_json_cache(json_data, options.cache_key, cls=BugEncoder) + raw_json = _bugs_to_raw_json(data) + write_raw_json_cache(raw_json, options.cache_key) bumps = _collect_bugs(data, options) if len(bumps) != 0: diff --git a/find_work/cli/execute.py b/find_work/cli/execute.py index 3115ee9..9d6903f 100644 --- a/find_work/cli/execute.py +++ b/find_work/cli/execute.py @@ -13,7 +13,7 @@ from click_aliases import ClickAliasedGroup from find_work.cli import Options from find_work.config import ConfigAlias, ConfigModuleOption, load_config -from find_work.types import CliOptionKind +from find_work.types._config import CliOptionKind def _new_click_option(opt: ConfigModuleOption) -> Callable: diff --git a/find_work/cli/pgo.py b/find_work/cli/pgo.py index 24573c6..5a88472 100644 --- a/find_work/cli/pgo.py +++ b/find_work/cli/pgo.py @@ -5,24 +5,44 @@ """ CLI subcommands for Gentoo Packages website. """ import asyncio -from collections.abc import Iterable import click import gentoopm +from pydantic import TypeAdapter from sortedcontainers import SortedDict, SortedSet from tabulate import tabulate from find_work.cache import ( - read_json_cache, - write_json_cache, + read_raw_json_cache, + write_raw_json_cache, +) +from find_work.cli import ( + Message, + Options, + ProgressDots, +) +from find_work.constants import ( + PGO_BASE_URL, + PGO_API_URL, +) +from find_work.types import ( + VersionBump, + VersionPart, +) +from find_work.types._pgo import ( + GraphQlResponse, + OutdatedPackage, + PkgCheckResult, + StableCandidate, ) -from find_work.cli import Message, Options, ProgressDots -from find_work.constants import PGO_BASE_URL, PGO_API_URL -from find_work.types import VersionBump, VersionPart from find_work.utils import aiohttp_session +OutdatedPackageSet = frozenset[OutdatedPackage] +PkgCheckResultSet = frozenset[PkgCheckResult] +StableCandidateSet = frozenset[StableCandidate] -async def _fetch_outdated() -> list[dict]: + +async def _fetch_outdated() -> OutdatedPackageSet: query = """query { outdatedPackages{ Atom @@ -34,24 +54,22 @@ async def _fetch_outdated() -> list[dict]: async with aiohttp_session() as session: async with session.post(PGO_API_URL, json={"query": query}, raise_for_status=True) as response: - data = await response.json() - return data.get("data", {}).get("outdatedPackages", []) + raw_data = await response.read() + + graphql = GraphQlResponse.model_validate_json(raw_data) + return graphql.data.outdated -def _collect_version_bumps(data: Iterable[dict], +def _collect_version_bumps(data: OutdatedPackageSet, options: Options) -> SortedSet[VersionBump]: if options.only_installed: pm = gentoopm.get_package_manager() result: SortedSet[VersionBump] = SortedSet() for item in data: - bump = VersionBump(item["Atom"], - item.get("GentooVersion", "(unknown)"), - item.get("NewestVersion", "(unknown)")) - - if options.only_installed and bump.atom not in pm.installed: + if options.only_installed and item.atom not in pm.installed: continue - result.add(bump) + result.add(item.as_version_bump) return result @@ -68,8 +86,12 @@ async def _outdated(options: Options) -> None: options.say(Message.CACHE_LOAD) with dots(): - data = read_json_cache(options.cache_key) - if data is None: + raw_data = read_raw_json_cache(options.cache_key) + if raw_data: + options.say(Message.CACHE_READ) + with dots(): + data = 
TypeAdapter(OutdatedPackageSet).validate_json(raw_data) + else: options.vecho("Fetching data from Gentoo Packages API", nl=False, err=True) with dots(): @@ -79,7 +101,10 @@ async def _outdated(options: Options) -> None: return options.say(Message.CACHE_WRITE) with dots(): - write_json_cache(data, options.cache_key) + raw_json = TypeAdapter(OutdatedPackageSet).dump_json( + data, by_alias=True, exclude_none=True + ) + write_raw_json_cache(raw_json, options.cache_key) no_work = True for bump in _collect_version_bumps(data, options): @@ -96,24 +121,18 @@ async def _outdated(options: Options) -> None: options.say(Message.NO_WORK) -async def _fetch_maintainer_stabilization(maintainer: str) -> list[dict]: - url = f"{PGO_BASE_URL}/maintainer/{maintainer}/stabilization.json" +async def _fetch_maintainer_stabilization(maintainer: str) -> PkgCheckResultSet: + + url = PGO_BASE_URL + f"/maintainer/{maintainer}/stabilization.json" async with aiohttp_session() as session: async with session.get(url, raise_for_status=True) as response: - data = await response.json() + raw_data = await response.read() - # bring data to a common structure - return [ - { - "Atom": f"{item['category']}/{item['package']}", - "Version": item["version"], - "Message": item["message"], - } - for item in data - ] + data = TypeAdapter(StableCandidateSet).validate_json(raw_data) + return frozenset(item.as_pkgcheck_result for item in data) -async def _fetch_all_stabilization() -> list[dict]: +async def _fetch_all_stabilization() -> PkgCheckResultSet: query = """query { pkgCheckResults(Class: "StableRequest") { Atom @@ -125,27 +144,29 @@ async def _fetch_all_stabilization() -> list[dict]: async with aiohttp_session() as session: async with session.post(PGO_API_URL, json={"query": query}, raise_for_status=True) as response: - data = await response.json() - return data.get("data", {}).get("pkgCheckResults", []) + raw_data = await response.read() + + graphql = GraphQlResponse.model_validate_json(raw_data) + return graphql.data.pkgcheck -async def _fetch_stabilization(options: Options) -> list[dict]: +async def _fetch_stabilization(options: Options) -> PkgCheckResultSet: if options.maintainer: return await _fetch_maintainer_stabilization(options.maintainer) return await _fetch_all_stabilization() -def _collect_stable_candidates(data: list[dict], +def _collect_stable_candidates(data: PkgCheckResultSet, options: Options) -> SortedDict[str, str]: if options.only_installed: pm = gentoopm.get_package_manager() result: SortedDict[str, str] = SortedDict() for item in data: - if options.only_installed and item["Atom"] not in pm.installed: + if options.only_installed and item.atom not in pm.installed: continue - key = "-".join([item["Atom"], item["Version"]]) - result[key] = item["Message"] + key = "-".join([item.atom, item.version]) + result[key] = item.message return result @@ -154,8 +175,12 @@ async def _stabilization(options: Options) -> None: options.say(Message.CACHE_LOAD) with dots(): - data = read_json_cache(options.cache_key) - if data is None: + raw_data = read_raw_json_cache(options.cache_key) + if raw_data: + options.say(Message.CACHE_READ) + with dots(): + data = TypeAdapter(PkgCheckResultSet).validate_json(raw_data) + else: options.vecho("Fetching data from Gentoo Packages API", nl=False, err=True) with dots(): @@ -165,7 +190,10 @@ async def _stabilization(options: Options) -> None: return options.say(Message.CACHE_WRITE) with dots(): - write_json_cache(data, options.cache_key) + raw_data = TypeAdapter(PkgCheckResultSet).dump_json( + 
data, by_alias=True, exclude_none=True + ) + write_raw_json_cache(raw_data, options.cache_key) candidates = _collect_stable_candidates(data, options) if len(candidates) != 0: @@ -193,6 +221,9 @@ def outdated(options: Options, version_part: VersionPart | None = None) -> None: @click.command() @click.pass_obj def stabilization(options: Options) -> None: - """ Find outdated packages. """ + """ + Find stable candidates. + """ + options.cache_key.feed("stabilization") asyncio.run(_stabilization(options)) diff --git a/find_work/cli/repology.py b/find_work/cli/repology.py index 6b58077..6827d34 100644 --- a/find_work/cli/repology.py +++ b/find_work/cli/repology.py @@ -5,7 +5,7 @@ """ CLI subcommands for everything Repology. """ import asyncio -from collections.abc import Iterable +from collections.abc import Collection import click import gentoopm @@ -17,14 +17,15 @@ from repology_client.types import Package from sortedcontainers import SortedSet from find_work.cache import ( - read_json_cache, - write_json_cache, + read_raw_json_cache, + write_raw_json_cache, ) from find_work.cli import Message, Options, ProgressDots from find_work.types import VersionBump, VersionPart from find_work.utils import aiohttp_session -ProjectsMapping = dict[str, set[Package]] +PackageSet = set[Package] +ProjectsMapping = dict[str, PackageSet] async def _fetch_outdated(options: Options) -> ProjectsMapping: @@ -38,17 +39,7 @@ async def _fetch_outdated(options: Options) -> ProjectsMapping: session=session, **filters) -def _projects_from_raw_json(raw_json: str | bytes) -> ProjectsMapping: - projects_adapter = TypeAdapter(ProjectsMapping) - return projects_adapter.validate_json(raw_json) - - -def _projects_to_raw_json(data: ProjectsMapping) -> bytes: - projects_adapter = TypeAdapter(ProjectsMapping) - return projects_adapter.dump_json(data, exclude_none=True) - - -def _collect_version_bumps(data: Iterable[set[Package]], +def _collect_version_bumps(data: Collection[PackageSet], options: Options) -> SortedSet[VersionBump]: pm = gentoopm.get_package_manager() @@ -59,7 +50,7 @@ def _collect_version_bumps(data: Iterable[set[Package]], for pkg in packages: if pkg.status == "outdated" and pkg.repo == options.repology.repo: - # ``pkg.version`` can contain spaces, better avoid it! + # "pkg.version" can contain spaces, better avoid it! 
origversion = pkg.origversion or pkg.version atom = pm.Atom(f"={pkg.visiblename}-{origversion}") @@ -84,11 +75,11 @@ async def _outdated(options: Options) -> None: options.say(Message.CACHE_LOAD) with dots(): - raw_cached_data = read_json_cache(options.cache_key, raw=True) - if raw_cached_data is not None: + raw_data = read_raw_json_cache(options.cache_key) + if raw_data: options.say(Message.CACHE_READ) with dots(): - data = _projects_from_raw_json(raw_cached_data) + data = TypeAdapter(ProjectsMapping).validate_json(raw_data) else: options.vecho("Fetching data from Repology API", nl=False, err=True) try: @@ -99,8 +90,10 @@ async def _outdated(options: Options) -> None: return options.say(Message.CACHE_WRITE) with dots(): - raw_json_data = _projects_to_raw_json(data) - write_json_cache(raw_json_data, options.cache_key, raw=True) + raw_json = TypeAdapter(ProjectsMapping).dump_json( + data, exclude_none=True + ) + write_raw_json_cache(raw_json, options.cache_key) no_work = True for bump in _collect_version_bumps(data.values(), options): diff --git a/find_work/config.py b/find_work/config.py index 106db4f..b6b4544 100644 --- a/find_work/config.py +++ b/find_work/config.py @@ -17,7 +17,7 @@ from platformdirs import PlatformDirs import find_work.data from find_work.constants import DEFAULT_CONFIG, ENTITY, PACKAGE -from find_work.types import CliOptionKind +from find_work.types._config import CliOptionKind # FIXME: Find out how to use Pydantic for type validation diff --git a/find_work/types.py b/find_work/types/__init__.py similarity index 90% rename from find_work/types.py rename to find_work/types/__init__.py index 4113ec6..270a204 100644 --- a/find_work/types.py +++ b/find_work/types/__init__.py @@ -3,11 +3,12 @@ # No warranty """ -Type definitions for the application, implemented as enums and Pydantic models. +Public type definitions for the application, implemented as enums and Pydantic +models. """ from dataclasses import field -from enum import Enum, StrEnum, auto +from enum import StrEnum, auto from itertools import zip_longest from pydantic.dataclasses import dataclass @@ -71,10 +72,3 @@ class BugView: last_change_date: str = field(compare=False) assigned_to: str = field(compare=False) summary: str = field(compare=False) - - -class CliOptionKind(Enum): - SIMPLE = auto() - - OPTION = auto() - FLAG = auto() diff --git a/find_work/types/_config.py b/find_work/types/_config.py new file mode 100644 index 0000000..c5d02f9 --- /dev/null +++ b/find_work/types/_config.py @@ -0,0 +1,16 @@ +# SPDX-License-Identifier: WTFPL +# SPDX-FileCopyrightText: 2024 Anna +# No warranty + +""" +Type definitions for configuration file. +""" + +from enum import Enum, auto + + +class CliOptionKind(Enum): + SIMPLE = auto() + + OPTION = auto() + FLAG = auto() diff --git a/find_work/types/_pgo.py b/find_work/types/_pgo.py new file mode 100644 index 0000000..fb19c2c --- /dev/null +++ b/find_work/types/_pgo.py @@ -0,0 +1,106 @@ +# SPDX-License-Identifier: WTFPL +# SPDX-FileCopyrightText: 2024 Anna +# No warranty + +""" +Internal type definitions for Gentoo Packages GraphQL API, implemented as +Pydantic models. +""" + +from pydantic import BaseModel, ConfigDict, Field + +from find_work.types import VersionBump + + +class OutdatedPackage(BaseModel): + """ + Information from Repology about an outdated package in the Gentoo tree. + """ + model_config = ConfigDict(frozen=True) + + #: The atom of the affected package. 
+ atom: str = Field(alias="Atom") + + #: The latest version of the package that is present in the Gentoo tree. + old_version: str = Field(alias="GentooVersion", default="(unknown)") + + #: The latest version of the package that is present upstream. + new_version: str = Field(alias="NewestVersion", default="(unknown)") + + @property + def as_version_bump(self) -> VersionBump: + """ + Equivalent :py:class:`find_work.types.VersionBump` object. + """ + + return VersionBump(self.atom, self.old_version, self.new_version) + + +class PkgCheckResult(BaseModel): + """ + Single warning from pkgcheck for a package version. + """ + model_config = ConfigDict(frozen=True) + + #: Atom of the package that is affected by this pkgcheck warning. + atom: str = Field(alias="Atom") + + # Version of the package that is affected by this pkgcheck warning. + version: str = Field(alias="Version") + + # Message of this warning, e.g. 'uses deprecated EAPI 5'. + message: str = Field(alias="Message") + + +class StableCandidate(BaseModel): + """ + Stabilization candidate representation. + """ + model_config = ConfigDict(frozen=True) + + #: Category name. + category: str + + #: Package name. + package: str + + #: Package version. + version: str + + #: Pkgcheck message. + message: str + + @property + def as_pkgcheck_result(self) -> PkgCheckResult: + """ + Equivalent :py:class:`PkgCheckResult` object. + """ + + data = { + "Atom": "/".join([self.category, self.package]), + "Version": self.version, + "Message": self.message, + } + return PkgCheckResult.model_validate(data) + + +class GraphQlData(BaseModel): + """ + Data returned by GraphQL. + """ + + #: Results of outdatedPackages query. + outdated: frozenset[OutdatedPackage] = Field(alias="outdatedPackages", + default=frozenset()) + + #: Results of pkgCheckResults query. + pkgcheck: frozenset[PkgCheckResult] = Field(alias="pkgCheckResults", + default=frozenset()) + + +class GraphQlResponse(BaseModel): + """ + Root GraphQL response. 
+ """ + + data: GraphQlData = Field(default_factory=GraphQlData) diff --git a/pyproject.toml b/pyproject.toml index 25a88bd..3d3ae25 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,6 +24,7 @@ dependencies = [ "pkgcheck", "platformdirs<5,>=4", "pydantic<3,>=2", + "pydantic-core<3,>=2", "python-bugzilla", "repology-client<2,>=0.0.2", "requests<3,>=2", @@ -52,7 +53,6 @@ docs = [ test = [ "pkgcore", "pytest", - "pytest-recording", ] [project.scripts] @@ -78,7 +78,7 @@ include = [ ] [tool.pytest.ini_options] -addopts = "--doctest-modules --block-network" +addopts = "--doctest-modules" [tool.mypy] disallow_untyped_defs = true diff --git a/tests/cassettes/test_bugzilla/test_bugs_json_roundtrip.yaml b/tests/cassettes/test_bugzilla/test_bugs_json_roundtrip.yaml deleted file mode 100644 index 35aa8b8..0000000 --- a/tests/cassettes/test_bugzilla/test_bugs_json_roundtrip.yaml +++ /dev/null @@ -1,108 +0,0 @@ -interactions: -- request: - body: null - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - User-Agent: - - python-requests/2.31.0 - method: HEAD - uri: https://bugs.gentoo.org/xmlrpc.cgi - response: - body: - string: '' - headers: - Connection: - - Keep-Alive - Content-Type: - - text/plain; charset=utf-8 - Date: - - Wed, 10 Jan 2024 02:02:40 GMT - Keep-Alive: - - timeout=15, max=100 - Server: - - Apache - status: - code: 200 - message: OK -- request: - body: ' - - - - Bugzilla.version - - - - - - - - - - - - - - - - ' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '161' - Content-Type: - - text/xml - User-Agent: - - python-bugzilla/3.2.0 - method: POST - uri: https://bugs.gentoo.org/xmlrpc.cgi - response: - body: - string: version5.0.6 - headers: - Connection: - - Keep-Alive - Content-Encoding: - - gzip - Content-Type: - - text/xml - Content-security-policy: - - frame-ancestors 'self' - Date: - - Wed, 10 Jan 2024 02:02:41 GMT - ETag: - - 7sMmJjLeC0KDNcgYXJQLEw - Keep-Alive: - - timeout=15, max=100 - SOAPServer: - - SOAP::Lite/Perl/1.27 - Server: - - Apache - Set-Cookie: - - Bugzilla_login_request_cookie=0jZsUHjVEQ; path=/; secure; HttpOnly - Strict-transport-security: - - max-age=15768000; includeSubDomains - Transfer-Encoding: - - chunked - Vary: - - Accept-Encoding - X-content-type-options: - - nosniff - X-frame-options: - - SAMEORIGIN - X-xss-protection: - - 1; mode=block - status: - code: 200 - message: OK -version: 1 diff --git a/tests/data/bug74072.json b/tests/data/bug74072.json deleted file mode 100644 index 3bad329..0000000 --- a/tests/data/bug74072.json +++ /dev/null @@ -1 +0,0 @@ -[{"id": 74072, "cc_detail": [{"name": "cantel", "id": 68480, "email": "cantel", "real_name": "Alex"}, {"name": "Captainsifff", "id": 32737, "email": "Captainsifff", "real_name": "Captain Sifff"}], "is_confirmed": true, "url": "", "cf_runtime_testing_required": "---", "flags": [], "is_open": false, "blocks": [], "op_sys": "Linux", "keywords": [], "component": "[OLD] Unspecified", "platform": "All", "groups": [], "depends_on": [], "qa_contact": "", "last_change_time": "20090916T09:04:18", "assigned_to": "bug-wranglers", "classification": "Unclassified", "priority": "High", "creator_detail": {"id": 17226, "name": "Augury", "real_name": "augury@vampares.org", "email": "Augury"}, "assigned_to_detail": {"name": "bug-wranglers", "id": 921, "email": "bug-wranglers", "real_name": "Gentoo Linux bug wranglers"}, "is_creator_accessible": true, "see_also": [], "cf_stabilisation_atoms": "", "alias": 
[], "version": "unspecified", "summary": "ld errors", "whiteboard": "", "severity": "trivial", "resolution": "WONTFIX", "is_cc_accessible": true, "creator": "Augury", "creation_time": "20041211T01:09:12", "product": "Gentoo Linux", "cc": ["cantel", "Captainsifff"], "status": "RESOLVED", "target_milestone": "---"}] diff --git a/tests/data/bug74072.json.license b/tests/data/bug74072.json.license deleted file mode 100644 index 66b7765..0000000 --- a/tests/data/bug74072.json.license +++ /dev/null @@ -1,3 +0,0 @@ -SPDX-FileCopyrightText: 2024 Gentoo Authors - -SPDX-License-Identifier: CC0-1.0 diff --git a/tests/test_bugzilla.py b/tests/test_bugzilla.py deleted file mode 100644 index 868fe33..0000000 --- a/tests/test_bugzilla.py +++ /dev/null @@ -1,20 +0,0 @@ -# SPDX-License-Identifier: WTFPL -# SPDX-FileCopyrightText: 2024 Anna -# No warranty - -import json -from pathlib import Path - -import pytest - -from find_work.cli.bugzilla import ( - _bugs_from_json, - _bugs_to_json, -) - - -@pytest.mark.vcr -def test_bugs_json_roundtrip(): - with open(Path(__file__).parent / "data" / "bug74072.json") as file: - data: list[dict] = json.load(file) - assert data == _bugs_to_json(_bugs_from_json(data)) diff --git a/tests/test_config.py b/tests/test_config.py index 00604ee..a8ed189 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -8,7 +8,7 @@ from pathlib import Path import pytest from find_work.config import Config -from find_work.types import CliOptionKind +from find_work.types._config import CliOptionKind def test_alias_empty(): diff --git a/tests/test_repology.py b/tests/test_repology.py index 3140409..c59fcd7 100644 --- a/tests/test_repology.py +++ b/tests/test_repology.py @@ -7,33 +7,7 @@ from repology_client.types import Package from find_work.types import VersionBump from find_work.cli import Options -from find_work.cli.repology import ( - _collect_version_bumps, - _projects_from_raw_json, - _projects_to_raw_json, -) - - -def test_projects_json_roundtrip(): - data = { - "firefox": { - Package( - repo="gentoo", - visiblename="www-client/firefox", - version="9999", - status="test", - licenses=frozenset(["GPL-2", "LGPL-2.1", "MPL-2.0"]), - ), - Package( - repo="gentoo", - visiblename="www-client/firefox-bin", - version="9999", - status="test", - licenses=frozenset(["GPL-2", "LGPL-2.1", "MPL-2.0"]), - ), - }, - } - assert data == _projects_from_raw_json(_projects_to_raw_json(data)) +from find_work.cli.repology import _collect_version_bumps def test_collect_version_bumps(): diff --git a/tox.ini b/tox.ini index 518d25c..d6c8d92 100644 --- a/tox.ini +++ b/tox.ini @@ -8,7 +8,6 @@ env_list = py3{11,12}, lint [testenv] description = run the tests + mypy deps = - aiodns mypy sortedcontainers-stubs types-requests @@ -17,8 +16,8 @@ deps = extras = test commands = - mypy find_work tests pytest -vv {tty:--color=yes} {posargs} + mypy find_work tests [testenv:lint] description = run the linters