From af515055e53254c082e386b667fa2c16afb5a16e Mon Sep 17 00:00:00 2001 From: Johann Bahl Date: Mon, 19 Jun 2023 04:24:40 +0200 Subject: [PATCH 01/13] expire on remote server --- ...26_005856_jb_issue_30_server_migration.rst | 7 + src/backy/api.py | 95 +++++- src/backy/backup.py | 206 +++++++++++- src/backy/client.py | 107 +++++- src/backy/daemon.py | 65 +++- src/backy/logging.py | 13 +- src/backy/main.py | 123 ++++--- src/backy/revision.py | 50 ++- src/backy/scheduler.py | 52 ++- src/backy/tests/test_api.py | 317 ++++++++++++++++++ src/backy/tests/test_backy.py | 48 +-- src/backy/tests/test_client.py | 14 +- src/backy/tests/test_daemon.py | 23 +- src/backy/tests/test_main.py | 85 ++++- src/backy/tests/test_revision.py | 6 +- src/backy/tests/test_scheduler.py | 21 +- 16 files changed, 1082 insertions(+), 150 deletions(-) create mode 100644 changelog.d/20230626_005856_jb_issue_30_server_migration.rst create mode 100644 src/backy/tests/test_api.py diff --git a/changelog.d/20230626_005856_jb_issue_30_server_migration.rst b/changelog.d/20230626_005856_jb_issue_30_server_migration.rst new file mode 100644 index 00000000..ec2acfa5 --- /dev/null +++ b/changelog.d/20230626_005856_jb_issue_30_server_migration.rst @@ -0,0 +1,7 @@ +- Support backup job migration across servers + +- Add `tags {set, add, remove}` subcommand + +- Add `expire` subcommand + +- logging: improve exception formatting diff --git a/src/backy/api.py b/src/backy/api.py index 63c15393..b39495d2 100644 --- a/src/backy/api.py +++ b/src/backy/api.py @@ -1,15 +1,28 @@ import datetime import re from json import JSONEncoder +from pathlib import Path from typing import Any, List, Tuple from aiohttp import hdrs, web -from aiohttp.web_exceptions import HTTPAccepted, HTTPNotFound, HTTPUnauthorized +from aiohttp.web_exceptions import ( + HTTPAccepted, + HTTPBadRequest, + HTTPForbidden, + HTTPNotFound, + HTTPPreconditionFailed, + HTTPPreconditionRequired, + HTTPServiceUnavailable, + HTTPUnauthorized, +) from 
aiohttp.web_middlewares import middleware from aiohttp.web_runner import AppRunner, TCPSite from structlog.stdlib import BoundLogger import backy.daemon +from backy.backup import Backup +from backy.revision import Revision +from backy.scheduler import Job class BackyJSONEncoder(JSONEncoder): @@ -42,6 +55,14 @@ def __init__(self, daemon, log): web.post("/v1/reload", self.reload_daemon), web.get("/v1/jobs", self.get_jobs), web.post("/v1/jobs/{job_name}/run", self.run_job), + web.get("/v1/backups", self.list_backups), + web.post("/v1/backups/{backup_name}/purge", self.run_purge), + web.post("/v1/backups/{backup_name}/touch", self.touch_backup), + web.get("/v1/backups/{backup_name}/revs", self.get_revs), + web.put( + "/v1/backups/{backup_name}/revs/{rev_spec}/tags", + self.put_tags, + ), ] ) @@ -121,8 +142,8 @@ async def to_json(self, request: web.Request, handler): else: return web.json_response(resp, dumps=BackyJSONEncoder().encode) - async def get_status(self, request: web.Request): - filter = request.query.get("filter", "") + async def get_status(self, request: web.Request) -> List[dict]: + filter = request.query.get("filter", None) if filter: filter = re.compile(filter) return self.daemon.status(filter) @@ -130,10 +151,10 @@ async def get_status(self, request: web.Request): async def reload_daemon(self, request: web.Request): self.daemon.reload() - async def get_jobs(self, request: web.Request): + async def get_jobs(self, request: web.Request) -> List[Job]: return list(self.daemon.jobs.values()) - async def get_job(self, request: web.Request): + async def get_job(self, request: web.Request) -> Job: try: name = request.match_info.get("job_name", None) return self.daemon.jobs[name] @@ -144,3 +165,67 @@ async def run_job(self, request: web.Request): j = await self.get_job(request) j.run_immediately.set() raise HTTPAccepted() + + async def list_backups(self, request: web.Request) -> List[str]: + return self.daemon.find_dead_backups() + + async def get_backup(self, 
request: web.Request) -> Backup: + name = request.match_info.get("backup_name", None) + if not name: + raise HTTPNotFound() + if name in self.daemon.jobs: + raise HTTPForbidden() + try: + path = Path(self.daemon.base_dir).joinpath(name).resolve() + if ( + not path.exists() + or Path(self.daemon.base_dir).resolve() not in path.parents + ): + raise FileNotFoundError + return Backup(path, request["log"]) + except FileNotFoundError: + raise HTTPNotFound() + + async def run_purge(self, request: web.Request): + backup = await self.get_backup(request) + backup.set_purge_pending() + raise HTTPAccepted() + + async def touch_backup(self, request: web.Request): + backup = await self.get_backup(request) + backup.touch() + + async def get_revs(self, request: web.Request) -> List[Revision]: + backup = await self.get_backup(request) + if request.query.get("only_clean", "") == "1": + revs = backup.clean_history + else: + revs = backup.history + return [r for r in revs if not r.location] + + async def put_tags(self, request: web.Request): + json = await request.json() + if "old_tags" not in json: + raise HTTPPreconditionRequired() + old_tags = set(json["old_tags"]) + if "new_tags" not in json: + raise HTTPBadRequest() + new_tags = set(json["new_tags"]) + + autoremove = request.query.get("autoremove", "") == "1" + spec = request.match_info.get("rev_spec", None) + backup = await self.get_backup(request) + try: + if not backup.tags( + "set", + spec, + new_tags, + old_tags, + autoremove=autoremove, + force=True, + ): + raise HTTPPreconditionFailed() + except KeyError: + raise HTTPNotFound() + except BlockingIOError: + raise HTTPServiceUnavailable() diff --git a/src/backy/backup.py b/src/backy/backup.py index 0d45dcd5..af3a35de 100644 --- a/src/backy/backup.py +++ b/src/backy/backup.py @@ -1,9 +1,11 @@ +import asyncio import datetime import fcntl import os import re import subprocess import time +from collections import defaultdict from enum import Enum from math import ceil, floor from 
pathlib import Path @@ -11,6 +13,8 @@ import tzlocal import yaml +from aiohttp import ClientConnectionError, ClientError, ClientResponseError +from aiohttp.web_exceptions import HTTPForbidden, HTTPNotFound from structlog.stdlib import BoundLogger import backy.backends.chunked @@ -24,6 +28,7 @@ ) from .backends import BackendException, BackyBackend, select_backend +from .client import APIClient, APIClientManager from .ext_deps import BACKY_EXTRACT from .quarantine import QuarantineStore from .revision import Revision, Trust, filter_schedule_tags @@ -77,10 +82,10 @@ def locked_function(self, *args, skip_lock=False, **kw): except BlockingIOError: self.log.warning( "lock-no-exclusive", - _fmt_msg="Failed to get exclusive lock for '{function}'. Continuing.", + _fmt_msg="Failed to get exclusive lock for '{function}'.", function=f.__name__, ) - return + raise else: try: return f(self, *args, **kw) @@ -164,6 +169,13 @@ def __init__(self, path: Path, log: BoundLogger): self.quarantine = QuarantineStore(self.path, self.log) + @property + def name(self) -> str: + return self.path.name + + def to_dict(self): + return self.config + def scan(self) -> None: self.history = [] self._by_uuid = {} @@ -178,6 +190,15 @@ def scan(self) -> None: # The history is stored: oldest first. newest last. 
self.history.sort(key=lambda r: r.timestamp) + def touch(self): + self.path.touch() + + def set_purge_pending(self): + self.path.joinpath(".purge_pending").touch() + + def clear_purge_pending(self): + self.path.joinpath(".purge_pending").unlink(missing_ok=True) + @property def clean_history(self) -> List[Revision]: """History without incomplete revisions.""" @@ -187,6 +208,19 @@ def clean_history(self) -> List[Revision]: def contains_distrusted(self) -> bool: return any((r == Trust.DISTRUSTED for r in self.clean_history)) + def validate_tags(self, tags): + missing_tags = ( + filter_schedule_tags(tags) - self.schedule.schedule.keys() + ) + if missing_tags: + self.log.error( + "unknown-tags", + _fmt_msg="The following tags are missing from the schedule: {unknown_tags}\n" + "Check the config file, add the `manual:` prefix or disable tag validation (-f)", + unknown_tags=", ".join(missing_tags), + ) + raise RuntimeError("Unknown tags") + ################# # Making backups @@ -205,21 +239,49 @@ def forget(self, revision: str) -> None: for r in self.find_revisions(revision): r.remove() + @locked(target=".backup", mode="exclusive") + def expire(self): + self.schedule.expire(self) + + @locked(target=".backup", mode="exclusive") + def tags( + self, + action: Literal["set", "add", "remove"], + revision: str, + tags: set[str], + expect: Optional[set[str]] = None, + autoremove: bool = False, + force=False, + ) -> bool: + self.scan() + revs = self.find_revisions(revision) + if not force and action != "remove": + self.validate_tags(tags) + for r in revs: + if expect is not None and expect != r.tags: + self.log.error("tags-expectation-failed") + return False + for r in revs: + match action: + case "set": + r.tags = tags + case "add": + r.tags |= tags + case "remove": + r.tags -= tags + case _: + raise ValueError(f"invalid action '{action}'") + if not r.tags and autoremove: + r.remove() + else: + r.write_info() + return True + @locked(target=".backup", mode="exclusive") 
@locked(target=".purge", mode="shared") def backup(self, tags: set[str], force: bool = False) -> None: if not force: - missing_tags = ( - filter_schedule_tags(tags) - self.schedule.schedule.keys() - ) - if missing_tags: - self.log.error( - "unknown-tags", - _fmt_msg="The following tags are missing from the schedule: {unknown_tags}\n" - "Check the config file, add the `manual:` prefix or disable tag validation (-f)", - unknown_tags=", ".join(missing_tags), - ) - raise RuntimeError("Unknown tags") + self.validate_tags(tags) self.path.joinpath("last").unlink(missing_ok=True) self.path.joinpath("last.rev").unlink(missing_ok=True) @@ -291,6 +353,7 @@ def verify(self, revision: str) -> None: @locked(target=".purge", mode="exclusive") def purge(self) -> None: self.history[-1].backend.purge() + self.clear_purge_pending() ################# # Restoring @@ -652,3 +715,120 @@ def find(self, spec: str) -> Revision: pass self.log.warning("find-rev-not-found", spec=spec) raise KeyError(spec) + + ################### + # Syncing Revisions + + @locked(target=".backup", mode="exclusive") + async def push_metadata(self, peers): + grouped = defaultdict(list) + for r in self.history: + if r.pending_changes: + grouped[r.location].append(r) + self.log.info( + "push-start", changes=sum(len(l) for l in grouped.values()) + ) + async with APIClientManager(peers, self.log) as apis: + await asyncio.gather( + *[ + self._push_metadata(apis[server], grouped[server]) + for server in apis + ] + ) + self.log.info("push-end") + + async def _push_metadata(self, api: APIClient, revs: List[Revision]): + purge_required = False + for r in revs: + log = self.log.bind( + server=r.location, + rev_uuid=r.uuid, + ) + log.debug( + "push-updating-tags", + old_tags=r.orig_tags, + new_tags=r.tags, + ) + try: + await api.put_tags(r, autoremove=True) + if r.tags: + r.orig_tags = r.tags + r.write_info() + else: + r.remove(force=True) + purge_required = True + except ClientResponseError: + 
log.warning("push-client-error", exc_style="short") + except ClientConnectionError: + log.info("push-connection-error", exc_style="short") + except ClientError: + log.warning("push-error", exc_info=True) + + if purge_required: + log = self.log.bind(server=api.server_name) + log.debug("push-purging-remote") + try: + await api.run_purge(self.name) + except ClientResponseError: + log.warning("push-purge-client-error", exc_style="short") + except ClientConnectionError: + log.info("push-purge-connection-error", exc_style="short") + except ClientError: + log.warning("push-purge-error", exc_info=True) + + @locked(target=".backup", mode="exclusive") + async def pull_metadata(self, peers: dict): + async def remove_dead_peer(): + for r in list(self.history): + if r.location and r.location not in peers: + self.log.info("pull-removing-dead-peer", rev_uuid=r.uuid) + r.remove(force=True) + + self.log.info("pull-start") + async with APIClientManager(peers, self.log) as apis: + await asyncio.gather( + remove_dead_peer(), + *[self._pull_metadata(apis[server]) for server in apis], + ) + self.log.info("pull-end") + + async def _pull_metadata(self, api: APIClient): + log = self.log.bind(server=api.server_name) + try: + await api.touch_backup(self.name) + remote_revs = await api.get_revs(self) + + except ClientResponseError as e: + if e.status in [ + HTTPNotFound.status_code, + HTTPForbidden.status_code, + ]: + log.debug("pull-not-found") + else: + log.warning("pull-client-error", exc_style="short") + remote_revs = [] + except ClientConnectionError: + log.info("pull-connection-error", exc_style="short") + return + except ClientError: + log.warning("pull-error", exc_info=True) + remote_revs = [] + log.debug( + "pull-found-matching-server", + revs=len(remote_revs), + ) + + matching_uuids = { + r.uuid for r in self.history if r.location == api.server_name + } + remote_uuids = {r.uuid for r in remote_revs} + for uuid in matching_uuids - remote_uuids: + 
log.warning("pull-removing-unknown-rev", rev_uuid=uuid) + self.find_by_uuid(uuid).remove(force=True) + + for r in remote_revs: + r.write_info() + log.debug( + "pull-updated-rev", + rev_uid=r.uuid, + ) diff --git a/src/backy/client.py b/src/backy/client.py index 485c1ccb..2f839f4c 100644 --- a/src/backy/client.py +++ b/src/backy/client.py @@ -1,19 +1,56 @@ import datetime +import re import sys from asyncio import get_running_loop -from typing import Dict +from typing import Dict, List import aiohttp import humanize -from aiohttp import ClientResponseError, ClientTimeout, hdrs +from aiohttp import ClientResponseError, ClientTimeout, TCPConnector, hdrs from aiohttp.web_exceptions import HTTPNotFound from rich import print as rprint from rich.table import Column, Table from structlog.stdlib import BoundLogger +import backy.backup +from backy.revision import Revision from backy.utils import format_datetime_local +class APIClientManager: + connector: TCPConnector + peers: dict + clients: dict[str, "APIClient"] + log: BoundLogger + + def __init__(self, peers, log): + self.connector = TCPConnector() + self.peers = peers + self.clients = dict() + self.log = log.bind(subsystem="APIClientManager") + + def __getitem__(self, name): + if name and name not in self.clients: + self.clients[name] = APIClient.from_conf( + name, self.peers[name], self.log, self.connector + ) + return self.clients[name] + + def __iter__(self): + return iter(self.peers) + + async def close(self): + for c in self.clients.values(): + await c.close() + await self.connector.close() + + async def __aenter__(self) -> "APIClientManager": + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.close() + + class APIClient: log: BoundLogger server_name: str @@ -25,6 +62,7 @@ def __init__( url: str, token: str, log, + connector=None, ): assert get_running_loop().is_running() self.log = log.bind(subsystem="APIClient") @@ -34,6 +72,8 @@ def __init__( headers={hdrs.AUTHORIZATION: "Bearer 
" + token}, raise_for_status=True, timeout=ClientTimeout(30, connect=10), + connector=connector, + connector_owner=connector is None, ) @classmethod @@ -46,7 +86,7 @@ def from_conf(cls, server_name, conf, *args, **kwargs): **kwargs, ) - async def fetch_status(self, filter=""): + async def fetch_status(self, filter: str = ""): async with self.session.get( "/v1/status", params={"filter": filter} ) as response: @@ -66,14 +106,49 @@ async def reload_daemon(self): async with self.session.post(f"/v1/reload") as response: return - async def get_jobs(self): + async def get_jobs(self) -> List[dict]: async with self.session.get("/v1/jobs") as response: return await response.json() - async def run_job(self, name): + async def run_job(self, name: str): async with self.session.post(f"/v1/jobs/{name}/run") as response: return + async def list_backups(self) -> List[str]: + async with self.session.get("/v1/backups") as response: + return await response.json() + + async def run_purge(self, name: str): + async with self.session.post(f"/v1/backups/{name}/purge") as response: + return + + async def touch_backup(self, name: str): + async with self.session.post(f"/v1/backups/{name}/touch") as response: + return + + async def get_revs( + self, backup: "backy.backup.Backup", only_clean: bool = True + ) -> List[Revision]: + async with self.session.get( + f"/v1/backups/{backup.name}/revs", + params={"only_clean": int(only_clean)}, + ) as response: + json = await response.json() + revs = [Revision.from_dict(r, backup, self.log) for r in json] + for r in revs: + r.backend_type = "" + r.orig_tags = r.tags + r.location = self.server_name + return revs + + async def put_tags(self, rev: Revision, autoremove: bool = False): + async with self.session.put( + f"/v1/backups/{rev.backup.name}/revs/{rev.uuid}/tags", + json={"old_tags": list(rev.orig_tags), "new_tags": list(rev.tags)}, + params={"autoremove": int(autoremove)}, + ) as response: + return + async def close(self): await self.session.close() 
@@ -143,15 +218,31 @@ async def jobs(self, filter_re=""): next_time, job["next_tags"], ) + backups = await self.api.list_backups() + if filter_re: + backups = list(filter(re.compile(filter_re).search, backups)) + for b in backups: + t.add_row( + b, + "-", + "-", + "Dead", + "-", + "", + "-", + "-", + "", + ) rprint(t) - print("{} jobs shown".format(len(jobs))) + print("{} jobs shown".format(len(jobs) + len(backups))) async def status(self): """Show job status overview""" t = Table("Status", "#") state_summary: Dict[str, int] = {} jobs = await self.api.get_jobs() + jobs += [{"status": "Dead"} for _ in await self.api.list_backups()] for job in jobs: state_summary.setdefault(job["status"], 0) state_summary[job["status"]] += 1 @@ -195,6 +286,10 @@ async def check(self): "check-manual-tags", manual_tags=job["manual_tags"], ) + if job["unsynced_revs"]: + self.log.info( + "check-unsynced-revs", unsynced_revs=job["unsynced_revs"] + ) if job["sla"] != "OK": log.critical( "check-sla-violation", diff --git a/src/backy/daemon.py b/src/backy/daemon.py index 377f98c5..de4e2151 100644 --- a/src/backy/daemon.py +++ b/src/backy/daemon.py @@ -6,7 +6,7 @@ import sys import time from pathlib import Path -from typing import IO, List, Optional +from typing import IO, List, Optional, Pattern import yaml from structlog.stdlib import BoundLogger @@ -168,6 +168,9 @@ def start(self, loop): self._apply_config() loop.create_task(self.purge_old_files(), name="purge-old-files") + loop.create_task( + self.purge_pending_backups(), name="purge-pending-backups" + ) loop.create_task(self.shutdown_loop(), name="shutdown-cleanup") def handle_signals(signum): @@ -252,7 +255,7 @@ async def shutdown_loop(self): self.log.info("stopping-loop") self.loop.stop() - def status(self, filter_re=None): + def status(self, filter_re: Optional[Pattern[str]] = None) -> List[dict]: """Collects status information for all jobs.""" result = [] for job in list(self.jobs.values()): @@ -260,10 +263,13 @@ def status(self, 
filter_re=None): continue job.backup.scan() manual_tags = set() + unsynced_revs = 0 if job.backup.clean_history: last = job.backup.clean_history[-1] for rev in job.backup.clean_history: manual_tags |= filter_manual_tags(rev.tags) + if rev.pending_changes: + unsynced_revs += 1 else: last = None result.append( @@ -289,6 +295,7 @@ def status(self, filter_re=None): ), manual_tags=", ".join(manual_tags), quarantine_reports=len(job.backup.quarantine.report_ids), + unsynced_revs=unsynced_revs, ) ) return result @@ -298,18 +305,52 @@ async def purge_old_files(self): # properly async, we might want to spawn those off into a separate # thread. while True: - self.log.info("purge-scanning") - for candidate in os.scandir(self.base_dir): - if not candidate.is_dir(follow_symlinks=False): - continue - self.log.debug("purge-candidate", candidate=candidate.path) - reference_time = time.time() - 3 * 31 * 24 * 60 * 60 - if not has_recent_changes(candidate, reference_time): - self.log.info("purging", candidate=candidate.path) - shutil.rmtree(candidate) - self.log.info("purge-finished") + try: + self.log.info("purge-scanning") + for candidate in os.scandir(self.base_dir): + if not candidate.is_dir(follow_symlinks=False): + continue + self.log.debug("purge-candidate", candidate=candidate.path) + reference_time = time.time() - 3 * 31 * 24 * 60 * 60 + if not has_recent_changes(candidate, reference_time): + self.log.info("purging", candidate=candidate.path) + shutil.rmtree(candidate) + self.log.info("purge-finished") + except Exception: + self.log.exception("purge") await asyncio.sleep(24 * 60 * 60) + async def purge_pending_backups(self): + # `stat` and other file system access things are _not_ + # properly async, we might want to spawn those off into a separate + # thread. 
+ while True: + try: + self.log.info("purge-pending-scanning") + for candidate in os.scandir(self.base_dir): + if ( + not candidate.is_dir(follow_symlinks=False) + or candidate.name in self.jobs # will get purged anyway + or not p.exists( + p.join(candidate.path, ".purge_pending") + ) + ): + continue + self.log.info("purging-pending", job=candidate.name) + await Job(self, candidate.name, self.log).run_purge() + self.log.info("purge-pending-finished") + except Exception: + self.log.exception("purge-pending") + await asyncio.sleep(24 * 60 * 60) + + def find_dead_backups(self) -> List[str]: + self.log.debug("scanning-backups") + return [ + b.name + for b in os.scandir(self.base_dir) + if b.is_dir(follow_symlinks=False) and b.name not in self.jobs + ] + def main(config_file: Path, log: BoundLogger): # pragma: no cover global daemon diff --git a/src/backy/logging.py b/src/backy/logging.py index 4c37a5a8..0a9b5f7b 100644 --- a/src/backy/logging.py +++ b/src/backy/logging.py @@ -237,6 +237,16 @@ def write(line): stderr = event_dict.pop("stderr", None) stack = event_dict.pop("stack", None) exception_traceback = event_dict.pop("exception_traceback", None) + exc_style = event_dict.pop("exc_style", "long") + match exc_style: + case "short": + exception_traceback = None + case "banner": + exception_traceback = ( + "\n" + + event_dict.get("exception_msg", exception_traceback) + + "\n" + ) write( " ".join( @@ -292,6 +302,7 @@ def process_exc_info(logger, name, event_dict): the exception yet. 
""" exc_info = event_dict.get("exc_info", None) + exc_style = event_dict.get("exc_style", None) if isinstance(exc_info, BaseException): event_dict["exc_info"] = ( @@ -301,7 +312,7 @@ def process_exc_info(logger, name, event_dict): ) elif isinstance(exc_info, tuple): pass - elif exc_info: + elif exc_info or exc_style: event_dict["exc_info"] = sys.exc_info() return event_dict diff --git a/src/backy/main.py b/src/backy/main.py index 18dd7e1e..e6ab6531 100644 --- a/src/backy/main.py +++ b/src/backy/main.py @@ -49,6 +49,7 @@ def status(self, yaml_: bool, revision: str) -> None: Column("Duration", justify="right"), "Tags", "Trust", + "Location", ) for r in revs: @@ -68,6 +69,7 @@ def status(self, yaml_: bool, revision: str) -> None: duration, ",".join(r.tags), r.trust.value, + r.location, ) rprint(t) @@ -153,7 +155,7 @@ async def run(): try: await getattr(c, apifunc)(**kwargs) except ClientConnectionError as e: - c.log.error("connection-error", _output=str(e)) + c.log.error("connection-error", exc_style="banner") c.log.debug("connection-error", exc_info=True) sys.exit(1) @@ -163,7 +165,6 @@ async def run(): def setup_argparser(): parser = argparse.ArgumentParser( description="Backup and restore for block devices.", - formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( @@ -173,7 +174,6 @@ def setup_argparser(): "-l", "--logfile", type=Path, - default=argparse.SUPPRESS, help=( "file name to write log output in. 
" "(default: /var/log/backy.log for `scheduler`, " @@ -196,13 +196,17 @@ def setup_argparser(): # CLIENT client = subparsers.add_parser( "client", - help="""\ -Query the api -""", + help="Query the api", ) g = client.add_argument_group() - g.add_argument("-c", "--config", type=Path, default="/etc/backy.conf") - g.add_argument("-p", "--peer") + g.add_argument( + "-c", + "--config", + type=Path, + default="/etc/backy.conf", + help="(default: %(default)s)", + ) + g.add_argument("-p", "--peer", help="(default: read from config file)") g = client.add_argument_group() g.add_argument("--url") g.add_argument("--token") @@ -244,31 +248,25 @@ def setup_argparser(): # CLIENT check p = client_parser.add_parser( "check", - help="""\ -Check whether all jobs adhere to their schedules' SLA. -""", + help="Check whether all jobs adhere to their schedules' SLA", ) p.set_defaults(apifunc="check") # BACKUP p = subparsers.add_parser( "backup", - help="""\ -Perform a backup. -""", + help="Perform a backup", ) p.add_argument( "-f", "--force", action="store_true", help="Do not validate tags" ) - p.add_argument("tags", help="Tags to apply to the backup.") + p.add_argument("tags", help="Tags to apply to the backup") p.set_defaults(func="backup") # RESTORE p = subparsers.add_parser( "restore", - help="""\ -Restore (a given revision) to a given target. -""", + help="Restore (a given revision) to a given target", ) p.add_argument( "--backend", @@ -276,29 +274,26 @@ def setup_argparser(): choices=list(RestoreBackend), default=RestoreBackend.AUTO, dest="restore_backend", + help="(default: %(default)s)" ) p.add_argument( "-r", "--revision", metavar="SPEC", default="latest", - help="use revision SPEC as restore source", + help="use revision SPEC as restore source (default: %(default)s)", ) p.add_argument( "target", metavar="TARGET", - help="""\ -Copy backed up revision to TARGET. Use stdout if TARGET is "-". -""", + help='Copy backed up revision to TARGET. 
Use stdout if TARGET is "-"', ) p.set_defaults(func="restore") # BACKUP p = subparsers.add_parser( "purge", - help="""\ -Purge the backup store (i.e. chunked) from unused data. -""", + help="Purge the backup store (i.e. chunked) from unused data", ) p.set_defaults(func="purge") @@ -324,9 +319,7 @@ def setup_argparser(): # STATUS p = subparsers.add_parser( "status", - help="""\ -Show backup status. Show inventory and summary information. -""", + help="Show backup status. Show inventory and summary information", ) p.add_argument("--yaml", dest="yaml_", action="store_true") p.add_argument( @@ -341,28 +334,28 @@ def setup_argparser(): # upgrade p = subparsers.add_parser( "upgrade", - help="""\ -Upgrade this backup (incl. its data) to the newest supported version. -""", + help="Upgrade this backup (incl. its data) to the newest supported version", ) p.set_defaults(func="upgrade") # SCHEDULER DAEMON p = subparsers.add_parser( "scheduler", - help="""\ -Run the scheduler. -""", + help="Run the scheduler", ) p.set_defaults(func="scheduler") - p.add_argument("-c", "--config", type=Path, default="/etc/backy.conf") + p.add_argument( + "-c", + "--config", + type=Path, + default="/etc/backy.conf", + help="(default: %(default)s)", + ) # DISTRUST p = subparsers.add_parser( "distrust", - help="""\ -Distrust specified revisions. -""", + help="Distrust specified revisions", ) p.add_argument( "-r", @@ -376,9 +369,7 @@ def setup_argparser(): # VERIFY p = subparsers.add_parser( "verify", - help="""\ -Verify specified revisions. -""", + help="Verify specified revisions", ) p.add_argument( "-r", @@ -392,9 +383,7 @@ def setup_argparser(): # FORGET p = subparsers.add_parser( "forget", - help="""\ -Forget specified revisions. 
-""", + help="Forget specified revision", ) p.add_argument( "-r", @@ -405,6 +394,49 @@ def setup_argparser(): ) p.set_defaults(func="forget") + # TAGS + p = subparsers.add_parser( + "tags", + help="Modify tags on revision", + ) + p.add_argument( + "--autoremove", + action="store_true", + help="Remove revision if no tags remain", + ) + p.add_argument( + "-f", "--force", action="store_true", help="Do not validate tags" + ) + p.add_argument( + "--expect", + metavar="", + help="Do nothing if tags differ from the expected tags", + ) + p.add_argument( + "action", + choices=["set", "add", "remove"], + ) + p.add_argument( + "-r", + "--revision", + metavar="SPEC", + default="all", + help="modify tags for revision SPEC, modifies all if not given (default: %(default)s)", + ) + p.add_argument( + "tags", + metavar="", + help="comma separated list of tags", + ) + p.set_defaults(func="tags") + + # EXPIRE + p = subparsers.add_parser( + "expire", + help="Expire tags according to schedule", + ) + p.set_defaults(func="expire") + return parser, client @@ -419,9 +451,6 @@ def main(): client_parser.print_usage() sys.exit(0) - if not hasattr(args, "logfile"): - args.logfile = None - default_logfile: Optional[Path] match args.func: case "scheduler": diff --git a/src/backy/revision.py b/src/backy/revision.py index 996bde1a..98da82a5 100644 --- a/src/backy/revision.py +++ b/src/backy/revision.py @@ -39,8 +39,10 @@ class Revision(object): timestamp: datetime.datetime stats: dict tags: set[str] + orig_tags: set[str] trust: Trust = Trust.TRUSTED backend_type: Literal["cowfile", "chunked"] = "chunked" + location: str = "" log: BoundLogger def __init__( @@ -55,6 +57,7 @@ def __init__( self.timestamp = timestamp if timestamp else utils.now() self.stats = {"bytes_written": 0} self.tags = set() + self.orig_tags = set() self.log = log.bind(revision_uuid=self.uuid, subsystem="revision") @classmethod @@ -79,12 +82,20 @@ def backend(self) -> "BackyBackend": def load(cls, file: Path, backup: "Backup", 
log: BoundLogger) -> "Revision": with file.open(encoding="utf-8") as f: metadata = yaml.safe_load(f) - assert metadata["timestamp"].tzinfo == datetime.timezone.utc - r = Revision( - backup, log, uuid=metadata["uuid"], timestamp=metadata["timestamp"] - ) + r = cls.from_dict(metadata, backup, log) + return r + + @classmethod + def from_dict(cls, metadata, backup, log): + ts = metadata["timestamp"] + if isinstance(ts, str): + ts = datetime.datetime.fromisoformat(ts) + assert ts.tzinfo == datetime.timezone.utc + r = Revision(backup, log, uuid=metadata["uuid"], timestamp=ts) r.stats = metadata.get("stats", {}) r.tags = set(metadata.get("tags", [])) + r.orig_tags = set(metadata.get("orig_tags", [])) + r.location = metadata.get("location", "") # Assume trusted by default to support migration r.trust = Trust(metadata.get("trust", Trust.TRUSTED.value)) # If the metadata does not show the backend type, then it's cowfile. @@ -109,6 +120,7 @@ def write_info(self) -> None: self.log.debug("writing-info", tags=", ".join(self.tags)) with SafeFile(self.info_filename, encoding="utf-8") as f: f.open_new("wb") + f.write("# Please use the `backy tags` subcommand to edit tags\n") yaml.safe_dump(self.to_dict(), f) def to_dict(self) -> dict: @@ -122,8 +134,14 @@ def to_dict(self) -> dict: "stats": self.stats, "trust": self.trust.value, "tags": list(self.tags), + "orig_tags": list(self.orig_tags), + "location": self.location, } + @property + def pending_changes(self): + return self.location and self.tags != self.orig_tags + def distrust(self) -> None: self.log.info("distrusted") self.trust = Trust.DISTRUSTED @@ -132,16 +150,22 @@ def verify(self) -> None: self.log.info("verified") self.trust = Trust.VERIFIED - def remove(self) -> None: + def remove(self, force=False) -> None: self.log.info("remove") - for filename in self.filename.parent.glob(self.filename.name + "*"): - if filename.exists(): - self.log.debug("remove-start", filename=filename) - filename.unlink() - 
self.log.debug("remove-end", filename=filename) - - if self in self.backup.history: - self.backup.history.remove(self) + if not force and self.location: + self.log.debug("remove-remote", location=self.location) + self.tags = set() + self.write_info() + else: + for filename in self.filename.parent.glob(self.filename.name + "*"): + if filename.exists(): + self.log.debug("remove-start", filename=filename) + filename.unlink() + self.log.debug("remove-end", filename=filename) + + if self in self.backup.history: + self.backup.history.remove(self) + del self.backup._by_uuid[self.uuid] def writable(self) -> None: if self.filename.exists(): diff --git a/src/backy/scheduler.py b/src/backy/scheduler.py index 4a0455a5..9c1e6f59 100644 --- a/src/backy/scheduler.py +++ b/src/backy/scheduler.py @@ -44,12 +44,12 @@ def __init__(self, daemon, name, log): self.name = name self.log = log.bind(job_name=name, subsystem="job") self.run_immediately = asyncio.Event() + self.path = self.daemon.base_dir / self.name + self.logfile = self.path / "backy.log" def configure(self, config): self.source = config["source"] self.schedule_name = config["schedule"] - self.path = self.daemon.base_dir / self.name - self.logfile = self.path / "backy.log" self.update_config() self.backup = Backup(self.path, self.log) self.last_config = config @@ -190,7 +190,9 @@ async def run_forever(self): self.update_config() await self.run_backup(next_tags) + await self.pull_metadata() await self.run_expiry() + await self.push_metadata() await self.run_purge() await self.run_callback() except asyncio.CancelledError: @@ -214,6 +216,18 @@ async def run_forever(self): self.backoff = 0 self.update_status("finished") + async def pull_metadata(self): + try: + await self.backup.pull_metadata(self.daemon.peers) + except Exception: + self.log.exception("pull-metadata-failed") + + async def push_metadata(self): + try: + await self.backup.push_metadata(self.daemon.peers) + except Exception: + 
self.log.exception("push-metadata-failed") + async def run_backup(self, tags): self.log.info("backup-started", tags=", ".join(tags)) proc = await asyncio.create_subprocess_exec( @@ -250,8 +264,38 @@ async def run_backup(self, tags): raise async def run_expiry(self): - self.log.info("expiring-revs") - self.schedule.expire(self.backup) + self.log.info("expiry-started") + proc = await asyncio.create_subprocess_exec( + BACKY_CMD, + "-b", + self.path, + "-l", + self.logfile, + "expire", + close_fds=True, + start_new_session=True, # Avoid signal propagation like Ctrl-C + stdin=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + try: + return_code = await proc.wait() + self.log.info( + "expiry-finished", + return_code=return_code, + subprocess_pid=proc.pid, + ) + if return_code: + raise RuntimeError( + f"Expiry failed with return code {return_code}" + ) + except asyncio.CancelledError: + self.log.warning("expiry-cancelled") + try: + proc.terminate() + except ProcessLookupError: + pass + raise async def run_purge(self): self.log.info("purge-started") diff --git a/src/backy/tests/test_api.py b/src/backy/tests/test_api.py new file mode 100644 index 00000000..6d1c45b7 --- /dev/null +++ b/src/backy/tests/test_api.py @@ -0,0 +1,317 @@ +import asyncio +import os +import os.path as p +import shutil + +import pytest +import yaml +from aiohttp.test_utils import unused_port + +from backy import utils +from backy.daemon import BackyDaemon +from backy.revision import Revision + + +async def wait_api_ready(daemon): + while daemon.reload_api.is_set(): + await asyncio.sleep(0.1) + + +@pytest.fixture +async def daemons(tmp_path, log, monkeypatch): + daemons: list[BackyDaemon] = [] # type: ignore + + async def create_daemons(count): + ports = [unused_port() for _ in range(count)] + for i in range(count): + daemon_dir = tmp_path / f"daemon{i}" + os.mkdir(daemon_dir) + daemon = BackyDaemon( + daemon_dir / "config", log.bind(logger=f"server-{i}") + ) + source = 
str(daemon_dir / "test01.source") + extra_conf = { + "api": { + "addrs": "localhost", + "port": ports[i], + "tokens": { + f"authtoken-{j}-{i}": f"server-{j}" + for j in range(count) + if i != j + }, + }, + "peers": { + f"server-{j}": { + "url": f"http://localhost:{ports[j]}", + "token": f"authtoken-{i}-{j}", + } + for j in range(count) + if i != j + }, + } + with open(str(daemon_dir / "config"), "w") as f: + f.write( + f"""\ +--- +global: + base-dir: {str(daemon_dir)} +schedules: + default: + daily: + interval: 24h + keep: 9 +jobs: + test01: + source: + type: file + filename: {source} + schedule: default + foo00: + source: + type: file + filename: {source} + schedule: default +""" + + yaml.safe_dump(extra_conf) + ) + + with open(source, "w") as f: + f.write("I am your father, Luke!") + + def fake_create_task(coro, *args, **kwargs): + coro.close() + return None + + with monkeypatch.context() as m: + m.setattr( + asyncio.get_running_loop(), "create_task", fake_create_task + ) + daemon.start(asyncio.get_running_loop()) + daemon.api_server() + + await wait_api_ready(daemon) + + daemons.append(daemon) + + return daemons + + yield create_daemons + + for d in daemons: + d.terminate() + + +def create_rev(backup, log): + rev = Revision.create(backup, {"manual:a"}, log) + rev.timestamp = utils.now() + rev.stats["duration"] = 60.0 + rev.materialize() + backup.scan() + return rev + + +async def modify_authtokens( + daemons: list[BackyDaemon], + src: list[int], + dest: list[int], + allow: bool, + bidirectional=False, +): + for d in dest: + for s in src: + if allow: + daemons[d].api_tokens[f"authtoken-{s}-{d}"] = f"server-{s}" + else: + daemons[d].api_tokens.pop(f"authtoken-{s}-{d}", None) + daemons[d].reload_api.set() + await wait_api_ready(daemons[d]) + if bidirectional: + await modify_authtokens(daemons, dest, src, allow) + + +async def test_remove_peer(daemons, log): + ds = await daemons(2) + + j0 = ds[0].jobs["test01"] + b0 = j0.backup + rev0 = create_rev(b0, log) + + 
assert [r.uuid for r in b0.history] == [rev0.uuid] + + rev0.location = "unknown" + rev0.materialize() + b0.scan() + + await j0.pull_metadata() + b0.scan() + assert [r.uuid for r in b0.history] == [] + + +async def test_remove_remote_backup(daemons, log): + ds = await daemons(2) + + j0 = ds[0].jobs["test01"] + b0 = j0.backup + rev0 = create_rev(b0, log) + + j1 = ds[1].jobs["test01"] + b1 = j1.backup + rev1 = create_rev(b1, log) + + assert [r.uuid for r in b0.history] == [rev0.uuid] + + del ds[1].jobs["test01"] + await j0.pull_metadata() + b0.scan() + assert [r.uuid for r in b0.history] == [rev0.uuid, rev1.uuid] + + shutil.rmtree(b1.path) + await j0.pull_metadata() + b0.scan() + assert [r.uuid for r in b0.history] == [rev0.uuid] + + +async def test_simple_sync(daemons, log): + ds = await daemons(3) + + j0 = ds[0].jobs["test01"] + b0 = j0.backup + rev0 = create_rev(b0, log) + + j1 = ds[1].jobs["test01"] + b1 = j1.backup + rev1 = create_rev(b1, log) + + ds[2].api_addrs = [] + ds[2].reload_api.set() + await wait_api_ready(ds[2]) + + assert [r.uuid for r in b0.history] == [rev0.uuid] + + await j0.pull_metadata() + b0.scan() + + assert [r.uuid for r in b0.history] == [rev0.uuid] + + del ds[1].jobs["test01"] + await j0.pull_metadata() + b0.scan() + + assert [r.uuid for r in b0.history] == [rev0.uuid, rev1.uuid] + new_rev1 = b0.history[1] + assert new_rev1.backup == b0 + assert new_rev1.timestamp == rev1.timestamp + assert new_rev1.backend_type == "" + assert new_rev1.stats == rev1.stats + assert new_rev1.tags == rev1.tags + assert new_rev1.orig_tags == new_rev1.tags + assert new_rev1.trust == rev1.trust + assert new_rev1.location == "server-1" + + new_rev1.remove() + assert [r.uuid for r in b0.history] == [rev0.uuid, rev1.uuid] + assert new_rev1.tags == set() + assert new_rev1.orig_tags == rev1.tags + + await j0.push_metadata() + b0.scan() + b1.scan() + + assert [r.uuid for r in b0.history] == [rev0.uuid] + assert [r.uuid for r in b1.history] == [] + assert 
p.exists(p.join(j1.path, ".purge_pending")) + + +async def test_split_brain(daemons, log): + ds = await daemons(4) + + await modify_authtokens(ds, [0, 1], [2, 3], allow=False, bidirectional=True) + + js = [d.jobs["test01"] for d in ds] + bs = [j.backup for j in js] + revs = [create_rev(b, log) for b in bs] + + for b, r in zip(bs, revs): + assert [r.uuid for r in b.history] == [r.uuid] + + for j in js: + await j.pull_metadata() + j.backup.scan() + + for b, r in zip(bs, revs): + assert [r.uuid for r in b.history] == [r.uuid] + + del ds[0].jobs["test01"] + del ds[2].jobs["test01"] + + await js[1].pull_metadata() + await js[3].pull_metadata() + + bs[1].scan() + bs[3].scan() + + assert [r.uuid for r in bs[1].history] == [ + revs[0].uuid, + revs[1].uuid, + ] + assert [r.uuid for r in bs[3].history] == [ + revs[2].uuid, + revs[3].uuid, + ] + + await modify_authtokens(ds, [2, 3], [0], allow=True) + + await js[3].pull_metadata() + bs[3].scan() + + assert [r.uuid for r in bs[3].history] == [ + revs[0].uuid, + revs[2].uuid, + revs[3].uuid, + ] + + bs[1].history[0].tags.add("manual:new1") + bs[3].history[0].remove() + + await js[1].push_metadata() + await js[3].push_metadata() # fails + + bs[0].scan() + assert bs[0].history[0].tags == {"manual:new1", "manual:a"} + + await js[1].pull_metadata() + await js[3].pull_metadata() + + bs[1].scan() + bs[3].scan() + + assert [(r.uuid, r.tags) for r in bs[1].history] == [ + (revs[0].uuid, {"manual:a", "manual:new1"}), + (revs[1].uuid, {"manual:a"}), + ] + assert [(r.uuid, r.tags) for r in bs[3].history] == [ + (revs[0].uuid, {"manual:a", "manual:new1"}), + (revs[2].uuid, {"manual:a"}), + (revs[3].uuid, {"manual:a"}), + ] + + await modify_authtokens( + ds, [0, 1, 2, 3], [0, 1, 2, 3], allow=True, bidirectional=True + ) + + await js[1].pull_metadata() + await js[3].pull_metadata() + + bs[1].scan() + bs[3].scan() + + assert [(r.uuid, r.tags) for r in bs[1].history] == [ + (revs[0].uuid, {"manual:a", "manual:new1"}), + (revs[1].uuid, 
{"manual:a"}), + (revs[2].uuid, {"manual:a"}), + ] + assert [(r.uuid, r.tags) for r in bs[3].history] == [ + (revs[0].uuid, {"manual:a", "manual:new1"}), + (revs[2].uuid, {"manual:a"}), + (revs[3].uuid, {"manual:a"}), + ] diff --git a/src/backy/tests/test_backy.py b/src/backy/tests/test_backy.py index 7428638d..a76b650e 100644 --- a/src/backy/tests/test_backy.py +++ b/src/backy/tests/test_backy.py @@ -141,31 +141,31 @@ def test_smoketest_external(): Diffing restore_state2.img against img_state2.img. Success. Restoring img_state1.img from level 3. Done. Diffing restore_state1.img against img_state1.img. Success. -┏━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━┓ -┃ Date ┃ ┃ ┃ ┃ ┃ ┃ -┃ ... ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ -┡━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━┩ -│ ... │ ... │ 512.0 KiB │ a moment │ manual:test │ trusted │ -│ ... │ │ │ │ │ │ -│ ... │ ... │ 512.0 KiB │ a moment │ daily │ trusted │ -│ ... │ │ │ │ │ │ -│ ... │ ... │ 512.0 KiB │ a moment │ test │ trusted │ -│ ... │ │ │ │ │ │ -│ ... │ ... │ 512.0 KiB │ a moment │ manual:test │ trusted │ -│ ... │ │ │ │ │ │ -└───────────────┴───────────────┴───────────┴──────────┴─────────────┴─────────┘ +┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━┓ +┃ Date ┃ ┃ ┃ ┃ ┃ ┃ ┃ +┃ ... ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ Location ┃ +┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━┩ +│ ... │ ... │ 512.0 KiB │ a moment │ manual:… │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... │ 512.0 KiB │ a moment │ daily │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... │ 512.0 KiB │ a moment │ test │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... │ 512.0 KiB │ a moment │ manual:… │ trusted │ │ +│ ... 
│ │ │ │ │ │ │ +└───────────┴───────────┴───────────┴──────────┴──────────┴─────────┴──────────┘ 4 revisions containing 2.0 MiB data (estimated) -┏━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━┓ -┃ Date ┃ ┃ ┃ ┃ ┃ ┃ -┃ ... ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ -┡━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━┩ -│ ... │ ... │ 512.0 KiB │ a moment │ manual:test │ trusted │ -│ ... │ │ │ │ │ │ -│ ... │ ... │ 512.0 KiB │ a moment │ test │ trusted │ -│ ... │ │ │ │ │ │ -│ ... │ ... │ 512.0 KiB │ a moment │ manual:test │ trusted │ -│ ... │ │ │ │ │ │ -└───────────────┴───────────────┴───────────┴──────────┴─────────────┴─────────┘ +┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━┓ +┃ Date ┃ ┃ ┃ ┃ ┃ ┃ ┃ +┃ ... ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ Location ┃ +┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━┩ +│ ... │ ... │ 512.0 KiB │ a moment │ manual:… │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... │ 512.0 KiB │ a moment │ test │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... │ 512.0 KiB │ a moment │ manual:… │ trusted │ │ +│ ... │ │ │ │ │ │ │ +└───────────┴───────────┴───────────┴──────────┴──────────┴─────────┴──────────┘ 3 revisions containing 1.5 MiB data (estimated) """ ) diff --git a/src/backy/tests/test_client.py b/src/backy/tests/test_client.py index 2d11d019..cb7d14e4 100644 --- a/src/backy/tests/test_client.py +++ b/src/backy/tests/test_client.py @@ -90,8 +90,9 @@ async def test_cli_jobs(cli_client, capsys): │ test01 │ OK │ - │ waiti… │ - │ │ - │ ... │ daily │ │ │ │ │ for │ │ │ │ ... 
│ │ │ │ │ │ deadl… │ │ │ │ │ │ +│ dead01 │ - │ - │ Dead │ - │ │ - │ - │ │ └────────┴─────┴────────┴────────┴────────┴────────┴────────┴─────────┴────────┘ -2 jobs shown +3 jobs shown """ ) == out @@ -143,6 +144,7 @@ async def test_cli_status(cli_client, capsys): ┏━━━━━━━━━━━━━━━━━━━━━━┳━━━┓ ┃ Status ┃ # ┃ ┡━━━━━━━━━━━━━━━━━━━━━━╇━━━┩ +│ Dead │ 1 │ │ waiting for deadline │ 2 │ └──────────────────────┴───┘ """ @@ -209,7 +211,7 @@ async def test_cli_runall(daemon, cli_client, monkeypatch): """\ ... D - api/new-conn path='/v1/jobs' query='' ... D - api/auth-passed client='cli' path='/v1/jobs' query='' -... D - api/request-result client='cli' path='/v1/jobs' query='' response=... +... D - api/request-result client='cli' path='/v1/jobs' query='' response=... status_code=200 ... D - api/new-conn path='/v1/jobs/test01/run' query='' ... D - api/auth-passed client='cli' path='/v1/jobs/test01/run' query='' ... D - api/request-result client='cli' path='/v1/jobs/test01/run' query='' status_code=202 @@ -257,7 +259,7 @@ async def test_cli_check_ok(daemon, cli_client): """\ ... D - api/new-conn path='/v1/status' query='filter=' ... D - api/auth-passed client='cli' path='/v1/status' query='filter=' -... D - api/request-result client='cli' path='/v1/status' query='filter=' response=... +... D - api/request-result client='cli' path='/v1/status' query='filter=' response=... status_code=200 ... I - CLIClient/check-exit exitcode=0 jobs=2 """ ) @@ -282,7 +284,7 @@ async def test_cli_check_too_old(daemon, clock, cli_client, log): """\ ... D - api/new-conn path='/v1/status' query='filter=' ... D - api/auth-passed client='cli' path='/v1/status' query='filter=' -... D - api/request-result client='cli' path='/v1/status' query='filter=' response=... +... D - api/request-result client='cli' path='/v1/status' query='filter=' response=... status_code=200 ... C test01 CLIClient/check-sla-violation last_time='2015-08-30 07:06:47+00:00' sla_overdue=172800.0 ... 
I - CLIClient/check-exit exitcode=2 jobs=2 """ @@ -307,7 +309,7 @@ async def test_cli_check_manual_tags(daemon, cli_client, log): """\ ... D - api/new-conn path='/v1/status' query='filter=' ... D - api/auth-passed client='cli' path='/v1/status' query='filter=' -... D - api/request-result client='cli' path='/v1/status' query='filter=' response=... +... D - api/request-result client='cli' path='/v1/status' query='filter=' response=... status_code=200 ... I test01 CLIClient/check-manual-tags manual_tags='manual:test' ... I - CLIClient/check-exit exitcode=0 jobs=2 """ @@ -330,7 +332,7 @@ async def test_cli_check_quarantine(daemon, cli_client, log): """\ ... D - api/new-conn path='/v1/status' query='filter=' ... D - api/auth-passed client='cli' path='/v1/status' query='filter=' -... D - api/request-result client='cli' path='/v1/status' query='filter=' response=... +... D - api/request-result client='cli' path='/v1/status' query='filter=' response=... status_code=200 ... W test01 CLIClient/check-quarantined reports=1 ... I - CLIClient/check-exit exitcode=1 jobs=2 """ diff --git a/src/backy/tests/test_daemon.py b/src/backy/tests/test_daemon.py index b3452fc0..d176646e 100644 --- a/src/backy/tests/test_daemon.py +++ b/src/backy/tests/test_daemon.py @@ -4,6 +4,7 @@ import re import signal from pathlib import Path +from unittest import mock import pytest import yaml @@ -54,6 +55,8 @@ async def daemon(tmp_path, log): with open(source, "w") as f: f.write("I am your father, Luke!") + tmp_path.joinpath("dead01").mkdir() + daemon.start(asyncio.get_running_loop()) yield daemon daemon.terminate() @@ -164,7 +167,7 @@ async def test_run_callback(daemon, log): assert isinstance(r["tags"][0], str) assert isinstance(r["stats"]["bytes_written"], int) assert isinstance(r["stats"]["duration"], float) - # assert isinstance(r["location"], str) + assert isinstance(r["location"], str) def test_spread(daemon): @@ -362,6 +365,8 @@ async def wait_for_job_finished(): exception>\tException ... 
W test01 job/backoff backoff=480 ... I test01 job/waiting next_tags='daily' next_time='2015-09-01 09:14:47' +... I test01 backup/pull-start \n\ +... I test01 backup/push-start \n\ ... I test01 job/stop \n\ ... I test01 job/waiting next_tags='daily' next_time='2015-09-02 07:32:51' """ @@ -380,3 +385,19 @@ def test_daemon_status(daemon): def test_daemon_status_filter_re(daemon): r = re.compile(r"foo\d\d") assert {"foo00"} == set([s["job"] for s in daemon.status(r)]) + + +async def test_purge_pending(daemon, monkeypatch): + run_purge = mock.Mock() + monkeypatch.setattr("backy.scheduler.Job.run_purge", run_purge) + monkeypatch.setattr( + "asyncio.sleep", mock.Mock(side_effect=asyncio.CancelledError()) + ) + + daemon.jobs["test01"].backup.set_purge_pending() + del daemon.jobs["test01"] + + with pytest.raises(asyncio.CancelledError): + await daemon.purge_pending_backups() + + run_purge.assert_called_once() diff --git a/src/backy/tests/test_main.py b/src/backy/tests/test_main.py index 4ae37b62..317fa57f 100644 --- a/src/backy/tests/test_main.py +++ b/src/backy/tests/test_main.py @@ -30,7 +30,7 @@ def test_display_usage(capsys, argv): """\ usage: pytest [-h] [-v] [-l LOGFILE] [-b BACKUPDIR] {client,backup,restore,purge,find,status,\ -upgrade,scheduler,distrust,verify,forget} +upgrade,scheduler,distrust,verify,forget,tags,expire} ... """ == out @@ -65,7 +65,7 @@ def test_display_help(capsys, argv): """\ usage: pytest [-h] [-v] [-l LOGFILE] [-b BACKUPDIR] {client,backup,restore,purge,find,status,\ -upgrade,scheduler,distrust,verify,forget} +upgrade,scheduler,distrust,verify,forget,tags,expire} ... Backup and restore for block devices. 
@@ -329,6 +329,75 @@ def test_call_scheduler(capsys, backup, argv, monkeypatch, tmp_path): assert exit.value.code == 0 +@pytest.mark.parametrize("action", ["set", "add", "remove"]) +def test_call_tags(capsys, backup, argv, monkeypatch, action): + monkeypatch.setattr(backy.main.Command, "tags", print_args) + argv.extend( + ["-v", "-b", str(backup.path), "tags", action, "-r", "last", "manual:a"] + ) + with pytest.raises(SystemExit) as exit: + backy.main.main() + assert exit.value.code == 0 + out, err = capsys.readouterr() + assert ( + Ellipsis( + f"""\ +(,) +{{'action': '{action}', + 'autoremove': False, + 'expect': None, + 'force': False, + 'revision': 'last', + 'tags': 'manual:a'}} +""" + ) + == out + ) + assert ( + Ellipsis( + f"""\ +... D quarantine/scan entries=0 +... D command/invoked args='... -v -b ... tags {action} -r last manual:a' +... D command/parsed func='tags' func_args={{'autoremove': False, 'force': False, 'expect': None, \ +'action': '{action}', 'revision': 'last', 'tags': 'manual:a'}} +... D command/successful \n\ +""" + ) + == utils.log_data + ) + assert exit.value.code == 0 + + +def test_call_expire(capsys, backup, argv, monkeypatch): + monkeypatch.setattr(backy.main.Command, "expire", print_args) + argv.extend(["-v", "-b", str(backup.path), "expire"]) + with pytest.raises(SystemExit) as exit: + backy.main.main() + assert exit.value.code == 0 + out, err = capsys.readouterr() + assert ( + Ellipsis( + """\ +(,) +{} +""" + ) + == out + ) + assert ( + Ellipsis( + """\ +... D quarantine/scan entries=0 +... D command/invoked args='... -v -b ... expire' +... D command/parsed func='expire' func_args={} +... 
D command/successful \n\ +""" + ) + == utils.log_data + ) + assert exit.value.code == 0 + + def test_call_unexpected_exception( capsys, backup, argv, monkeypatch, log, tmp_path ): @@ -380,11 +449,11 @@ def test_commands_wrapper_status( assert err == "" assert out == Ellipsis( """\ -┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━┳━━━━━━━━━┳━━━━━━━━━━┳━━━━━━┳━━━━━━━━━┓ -┃ Date (...) ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ -┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━╇━━━━━━━━━╇━━━━━━━━━━╇━━━━━━╇━━━━━━━━━┩ -│ ... │ 1 │ 0 Bytes │ - │ │ trusted │ -└──────────────────────┴────┴─────────┴──────────┴──────┴─────────┘ +┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━┳━━━━━━━━━┳━━━━━━━━━━┳━━━━━━┳━━━━━━━━━┳━━━━━━━━━━┓ +┃ Date (...) ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ Location ┃ +┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━╇━━━━━━━━━╇━━━━━━━━━━╇━━━━━━╇━━━━━━━━━╇━━━━━━━━━━┩ +│ ... │ 1 │ 0 Bytes │ - │ │ trusted │ │ +└──────────────────────┴────┴─────────┴──────────┴──────┴─────────┴──────────┘ 1 revisions containing 0 Bytes data (estimated) """ ) @@ -408,6 +477,8 @@ def test_commands_wrapper_status_yaml( out == f"""\ - backend_type: {backup.default_backend_type} + location: '' + orig_tags: [] parent: '' stats: bytes_written: 42 diff --git a/src/backy/tests/test_revision.py b/src/backy/tests/test_revision.py index 6e3a8208..c5fb8b9b 100644 --- a/src/backy/tests/test_revision.py +++ b/src/backy/tests/test_revision.py @@ -4,7 +4,7 @@ import yaml -import backy +import backy.utils from backy.revision import Revision UTC = datetime.timezone.utc @@ -71,6 +71,8 @@ def test_store_revision_data(backup, clock, log): "uuid": "asdf2", "stats": {"bytes_written": 0}, "tags": [], + "orig_tags": [], + "location": "", "trust": "trusted", "timestamp": datetime.datetime(2015, 9, 1, 7, 6, 47, tzinfo=UTC), } @@ -86,6 +88,8 @@ def test_store_revision_data_no_parent(backup, clock, log): "uuid": "asdf2", "stats": {"bytes_written": 0}, "tags": [], + "orig_tags": [], + "location": "", "trust": "trusted", "timestamp": datetime.datetime(2015, 9, 1, 7, 6, 47, tzinfo=UTC), } 
diff --git a/src/backy/tests/test_scheduler.py b/src/backy/tests/test_scheduler.py index 6fd31954..279d75fd 100644 --- a/src/backy/tests/test_scheduler.py +++ b/src/backy/tests/test_scheduler.py @@ -7,20 +7,23 @@ from backy.scheduler import Job -@pytest.mark.asyncio -async def test_wait_for_deadline_no_deadline_fails(log): +@pytest.fixture +def daemon(tmp_path): daemon = mock.Mock() + daemon.base_dir = tmp_path + return daemon + + +async def test_wait_for_deadline_no_deadline_fails(daemon, log): job = Job(daemon, "dummy", log) - # Not having a a deadline set causes this to fail (immediately) + # Not having a deadline set causes this to fail (immediately) with pytest.raises(TypeError): await job._wait_for_deadline() -@pytest.mark.asyncio -async def test_wait_for_deadline(log): - daemon = mock.Mock() +async def test_wait_for_deadline(daemon, log): job = Job(daemon, "dummy", log) - # Not having a a deadline set causes this to fail. + # Not having a deadline set causes this to fail. now = backy.utils.now() job.next_time = now + datetime.timedelta(seconds=0.3) result = await job._wait_for_deadline() @@ -28,9 +31,7 @@ async def test_wait_for_deadline(log): assert backy.utils.now() - now >= datetime.timedelta(seconds=0.3) -@pytest.mark.asyncio -async def test_wait_for_deadline_1000(log): - daemon = mock.Mock() +async def test_wait_for_deadline_1000(daemon, log): job = Job(daemon, "dummy", log) # Large deadline now = backy.utils.now() From e62093e7903a4a1d26065e978b8f7e72bfdfe449 Mon Sep 17 00:00:00 2001 From: Johann Bahl Date: Thu, 15 Jun 2023 01:12:53 +0200 Subject: [PATCH 02/13] add taskid --- ...26_005856_jb_issue_30_server_migration.rst | 2 + src/backy/api.py | 52 +++++++++---- src/backy/backup.py | 9 ++- src/backy/client.py | 9 ++- src/backy/conftest.py | 4 +- src/backy/daemon.py | 2 +- src/backy/logging.py | 43 ++++++----- src/backy/main.py | 56 ++++++++++++-- src/backy/scheduler.py | 24 ++++-- src/backy/tests/test_client.py | 76 ++++++++++--------- 
src/backy/tests/test_daemon.py | 35 +++++---- src/backy/tests/test_main.py | 8 +- src/backy/utils.py | 5 ++ 13 files changed, 215 insertions(+), 110 deletions(-) diff --git a/changelog.d/20230626_005856_jb_issue_30_server_migration.rst b/changelog.d/20230626_005856_jb_issue_30_server_migration.rst index ec2acfa5..e6845df7 100644 --- a/changelog.d/20230626_005856_jb_issue_30_server_migration.rst +++ b/changelog.d/20230626_005856_jb_issue_30_server_migration.rst @@ -5,3 +5,5 @@ - Add `expire` subcommand - logging: improve exception formatting + +- logging: add taskid diff --git a/src/backy/api.py b/src/backy/api.py index b39495d2..5d0ebccb 100644 --- a/src/backy/api.py +++ b/src/backy/api.py @@ -11,7 +11,6 @@ HTTPForbidden, HTTPNotFound, HTTPPreconditionFailed, - HTTPPreconditionRequired, HTTPServiceUnavailable, HTTPUnauthorized, ) @@ -23,6 +22,7 @@ from backy.backup import Backup from backy.revision import Revision from backy.scheduler import Job +from backy.utils import generate_taskid class BackyJSONEncoder(JSONEncoder): @@ -43,7 +43,7 @@ class BackyAPI: log: BoundLogger def __init__(self, daemon, log): - self.log = log.bind(subsystem="api") + self.log = log.bind(subsystem="api", job_name="~") self.daemon = daemon self.sites = {} self.app = web.Application( @@ -98,9 +98,12 @@ async def reconfigure( @middleware async def log_conn(self, request: web.Request, handler): request["log"] = self.log.bind( - path=request.path, query=request.query_string + sub_taskid=request.headers.get("taskid"), + taskid=generate_taskid(), + ) + request["log"].debug( + "new-conn", path=request.path, query=request.query_string ) - request["log"].debug("new-conn") try: resp = await handler(request) except Exception as e: @@ -128,8 +131,7 @@ async def require_auth(self, request: web.Request, handler): request["log"].info("auth-token-unknown") raise HTTPUnauthorized() request["client"] = client - request["log"] = request["log"].bind(client=client) - request["log"].debug("auth-passed") + 
request["log"] = request["log"].bind(job_name="~" + client) return await handler(request) @middleware @@ -144,36 +146,43 @@ async def to_json(self, request: web.Request, handler): async def get_status(self, request: web.Request) -> List[dict]: filter = request.query.get("filter", None) + request["log"].info("get-status", filter=filter) if filter: filter = re.compile(filter) return self.daemon.status(filter) async def reload_daemon(self, request: web.Request): + request["log"].info("reload-daemon") self.daemon.reload() async def get_jobs(self, request: web.Request) -> List[Job]: + request["log"].info("get-jobs") return list(self.daemon.jobs.values()) async def get_job(self, request: web.Request) -> Job: + name = request.match_info.get("job_name", None) + request["log"].info("get-job", name=name) try: - name = request.match_info.get("job_name", None) return self.daemon.jobs[name] except KeyError: + request["log"].info("get-job-not-found", name=name) raise HTTPNotFound() async def run_job(self, request: web.Request): j = await self.get_job(request) + request["log"].info("run-job", name=j.name) j.run_immediately.set() raise HTTPAccepted() async def list_backups(self, request: web.Request) -> List[str]: + request["log"].info("list-backups") return self.daemon.find_dead_backups() async def get_backup(self, request: web.Request) -> Backup: name = request.match_info.get("backup_name", None) - if not name: - raise HTTPNotFound() + request["log"].info("get-backups", name=name) if name in self.daemon.jobs: + request["log"].info("get-backups-forbidden", name=name) raise HTTPForbidden() try: path = Path(self.daemon.base_dir).joinpath(name).resolve() @@ -184,19 +193,23 @@ async def get_backup(self, request: web.Request) -> Backup: raise FileNotFoundError return Backup(path, request["log"]) except FileNotFoundError: + request["log"].info("get-backups-not-found", name=name) raise HTTPNotFound() async def run_purge(self, request: web.Request): backup = await 
self.get_backup(request) + request["log"].info("run-purge", name=backup.name) backup.set_purge_pending() raise HTTPAccepted() async def touch_backup(self, request: web.Request): backup = await self.get_backup(request) + request["log"].info("touch-backup", name=backup.name) backup.touch() async def get_revs(self, request: web.Request) -> List[Revision]: backup = await self.get_backup(request) + request["log"].info("get-revs", name=backup.name) if request.query.get("only_clean", "") == "1": revs = backup.clean_history else: @@ -205,16 +218,23 @@ async def get_revs(self, request: web.Request) -> List[Revision]: async def put_tags(self, request: web.Request): json = await request.json() - if "old_tags" not in json: - raise HTTPPreconditionRequired() - old_tags = set(json["old_tags"]) - if "new_tags" not in json: + try: + old_tags = set(json["old_tags"]) + new_tags = set(json["new_tags"]) + except KeyError: + request["log"].info("put-tags-bad-request") raise HTTPBadRequest() - new_tags = set(json["new_tags"]) - autoremove = request.query.get("autoremove", "") == "1" spec = request.match_info.get("rev_spec", None) backup = await self.get_backup(request) + request["log"].info( + "put-tags", + name=backup.name, + old_tags=old_tags, + new_tags=new_tags, + spec=spec, + autoremove=autoremove, + ) try: if not backup.tags( "set", @@ -226,6 +246,8 @@ async def put_tags(self, request: web.Request): ): raise HTTPPreconditionFailed() except KeyError: + request["log"].info("put-tags-rev-not-found") raise HTTPNotFound() except BlockingIOError: + request["log"].info("put-tags-locked") raise HTTPServiceUnavailable() diff --git a/src/backy/backup.py b/src/backy/backup.py index af3a35de..cb2099b6 100644 --- a/src/backy/backup.py +++ b/src/backy/backup.py @@ -718,9 +718,10 @@ def find(self, spec: str) -> Revision: ################### # Syncing Revisions + # called by the scheduler without a subprocess @locked(target=".backup", mode="exclusive") - async def push_metadata(self, peers): + 
async def push_metadata(self, peers, taskid: str): grouped = defaultdict(list) for r in self.history: if r.pending_changes: @@ -728,7 +729,7 @@ async def push_metadata(self, peers): self.log.info( "push-start", changes=sum(len(l) for l in grouped.values()) ) - async with APIClientManager(peers, self.log) as apis: + async with APIClientManager(peers, taskid, self.log) as apis: await asyncio.gather( *[ self._push_metadata(apis[server], grouped[server]) @@ -777,7 +778,7 @@ async def _push_metadata(self, api: APIClient, revs: List[Revision]): log.warning("push-purge-error", exc_info=True) @locked(target=".backup", mode="exclusive") - async def pull_metadata(self, peers: dict): + async def pull_metadata(self, peers: dict, taskid: str): async def remove_dead_peer(): for r in list(self.history): if r.location and r.location not in peers: @@ -785,7 +786,7 @@ async def remove_dead_peer(): r.remove(force=True) self.log.info("pull-start") - async with APIClientManager(peers, self.log) as apis: + async with APIClientManager(peers, taskid, self.log) as apis: await asyncio.gather( remove_dead_peer(), *[self._pull_metadata(apis[server]) for server in apis], diff --git a/src/backy/client.py b/src/backy/client.py index 2f839f4c..cd2e2db5 100644 --- a/src/backy/client.py +++ b/src/backy/client.py @@ -21,18 +21,20 @@ class APIClientManager: connector: TCPConnector peers: dict clients: dict[str, "APIClient"] + taskid: str log: BoundLogger - def __init__(self, peers, log): + def __init__(self, peers, taskid, log): self.connector = TCPConnector() self.peers = peers self.clients = dict() + self.taskid = taskid self.log = log.bind(subsystem="APIClientManager") def __getitem__(self, name): if name and name not in self.clients: self.clients[name] = APIClient.from_conf( - name, self.peers[name], self.log, self.connector + name, self.peers[name], self.taskid, self.log, self.connector ) return self.clients[name] @@ -61,6 +63,7 @@ def __init__( server_name: str, url: str, token: str, + taskid: 
str, log, connector=None, ): @@ -69,7 +72,7 @@ def __init__( self.server_name = server_name self.session = aiohttp.ClientSession( url, - headers={hdrs.AUTHORIZATION: "Bearer " + token}, + headers={hdrs.AUTHORIZATION: "Bearer " + token, "taskid": taskid}, raise_for_status=True, timeout=ClientTimeout(30, connect=10), connector=connector, diff --git a/src/backy/conftest.py b/src/backy/conftest.py index dda2e421..be70ef1e 100644 --- a/src/backy/conftest.py +++ b/src/backy/conftest.py @@ -103,12 +103,10 @@ class PytestLogger: def msg(self, message: str): utils.log_data += message + "\n" - backy.logging.init_logging(True) + backy.logging.init_logging(True, defaults={"taskid": "AAAA"}) structlog.get_config()["logger_factory"].factories["file"] = PytestLogger - yield structlog.get_config()["processors"][-1] @pytest.fixture(autouse=True) def reset_structlog(setup_structlog): utils.log_data = "" - setup_structlog.default_job_name = "" diff --git a/src/backy/daemon.py b/src/backy/daemon.py index de4e2151..c1b9cfe9 100644 --- a/src/backy/daemon.py +++ b/src/backy/daemon.py @@ -116,8 +116,8 @@ def _apply_config(self): job = self.jobs[name] if config != job.last_config: self.log.info("changed-job", job_name=name) - job.configure(config) job.stop() + job.configure(config) job.start() for name, job in list(self.jobs.items()): diff --git a/src/backy/logging.py b/src/backy/logging.py index 0a9b5f7b..9e6d9b4f 100644 --- a/src/backy/logging.py +++ b/src/backy/logging.py @@ -10,6 +10,7 @@ from typing import Optional import structlog +from structlog.typing import EventDict, WrappedLogger try: import colorama @@ -136,11 +137,8 @@ class ConsoleFileRenderer: "trace", ] - def __init__( - self, min_level, default_job_name: str = "", pad_event=_EVENT_WIDTH - ): + def __init__(self, min_level, pad_event=_EVENT_WIDTH): self.min_level = self.LEVELS.index(min_level.lower()) - self.default_job_name = default_job_name if colorama is None: print( _MISSING.format(who=self.__class__.__name__, 
package="colorama") @@ -167,7 +165,9 @@ def __init__( max(self._level_to_color.keys(), key=lambda e: len(e)) ) - def __call__(self, logger, method_name, event_dict): + def __call__( + self, logger: WrappedLogger, method_name: str, event_dict: EventDict + ): console_io = io.StringIO() log_io = io.StringIO() @@ -203,9 +203,9 @@ def write(line): + " " ) - pid = event_dict.pop("pid", None) - if pid is not None: - write(DIM + str(pid) + RESET_ALL + " ") + taskid = event_dict.pop("taskid", None) + if taskid is not None: + write(DIM + str(taskid) + RESET_ALL + " ") level = event_dict.pop("level", None) if level is not None: @@ -213,7 +213,10 @@ def write(line): self._level_to_color[level] + level[0].upper() + RESET_ALL + " " ) - job_name = event_dict.pop("job_name", self.default_job_name) + job_name = event_dict.pop("job_name", "") + sub_taskid = event_dict.pop("sub_taskid", None) + if sub_taskid: + job_name += f"[{sub_taskid}]" if job_name: write(job_name.ljust(20) + " ") @@ -290,11 +293,6 @@ def write(line): return {"console": console_io.getvalue(), "file": log_io.getvalue()} -def add_pid(logger, method_name, event_dict): - event_dict["pid"] = os.getpid() - return event_dict - - def process_exc_info(logger, name, event_dict): """Transforms exc_info to the exception tuple format returned by sys.exc_info(). 
Uses the the same logic as as structlog's format_exc_info() @@ -337,19 +335,30 @@ def format_exc_info(logger, name, event_dict): return event_dict +class EventDictDefaults: + def __init__(self, defaults: dict): + self.defaults = defaults + + def __call__( + self, logger: WrappedLogger, method_name: str, event_dict: EventDict + ) -> EventDict: + for k, v in self.defaults.items(): + event_dict.setdefault(k, v) + return event_dict + + def init_logging( verbose: bool, logfile: Optional[Path] = None, - default_job_name: str = "", + defaults: Optional[dict] = None, ): console_file_renderer = ConsoleFileRenderer( min_level="trace" if verbose else "info", - default_job_name=default_job_name, ) processors = [ - add_pid, + EventDictDefaults(defaults or dict()), structlog.processors.add_log_level, process_exc_info, format_exc_info, diff --git a/src/backy/main.py b/src/backy/main.py index e6ab6531..552dd0e4 100644 --- a/src/backy/main.py +++ b/src/backy/main.py @@ -17,7 +17,7 @@ from structlog.stdlib import BoundLogger import backy.daemon -from backy.utils import format_datetime_local +from backy.utils import format_datetime_local, generate_taskid from . 
import logging from .backup import Backup, RestoreBackend @@ -28,10 +28,12 @@ class Command(object): """Proxy between CLI calls and actual backup code.""" path: Path + taskid: str log: BoundLogger - def __init__(self, path: Path, log: BoundLogger): + def __init__(self, path: Path, taskid, log: BoundLogger): self.path = path + self.taskid = taskid self.log = log def status(self, yaml_: bool, revision: str) -> None: @@ -141,15 +143,17 @@ def client( ) -> None: async def run(): if url and token: - api = APIClient("", url, token, self.log) + api = APIClient("", url, token, self.taskid, self.log) else: d = backy.daemon.BackyDaemon(config, self.log) d._read_config() if peer: - api = APIClient.from_conf(peer, d.peers[peer], self.log) + api = APIClient.from_conf( + peer, d.peers[peer], self.taskid, self.log + ) else: api = APIClient.from_conf( - "", d.api_cli_default, self.log + "", d.api_cli_default, self.taskid, self.log ) async with CLIClient(api, self.log) as c: try: @@ -161,6 +165,35 @@ async def run(): asyncio.run(run()) + def tags( + self, + action: Literal["set", "add", "remove"], + autoremove: bool, + expect: Optional[str], + revision: str, + tags: str, + force: bool, + ) -> int: + tags_ = set(t.strip() for t in tags.split(",")) + if expect is None: + expect_ = None + else: + expect_ = set(t.strip() for t in expect.split(",")) + b = backy.backup.Backup(self.path, self.log) + success = b.tags( + action, + revision, + tags_, + expect=expect_, + autoremove=autoremove, + force=force, + ) + return int(not success) + + def expire(self) -> None: + b = backy.backup.Backup(self.path, self.log) + b.expire() + def setup_argparser(): parser = argparse.ArgumentParser( @@ -176,7 +209,7 @@ def setup_argparser(): type=Path, help=( "file name to write log output in. 
" - "(default: /var/log/backy.log for `scheduler`, " + "(default: /var/log/backy.log for `scheduler`, ignored for `client`, " "$backupdir/backy.log otherwise)" ), ) @@ -190,6 +223,12 @@ def setup_argparser(): "(default: %(default)s)" ), ) + parser.add_argument( + "-t", + "--taskid", + default=generate_taskid(), + help="id to include in log messages (default: 4 random base32 chars)", + ) subparsers = parser.add_subparsers() @@ -472,12 +511,12 @@ def main(): logging.init_logging( args.verbose, args.logfile or default_logfile, - default_job_name=default_job_name, + defaults={"job_name": default_job_name, "taskid": args.taskid}, ) log = structlog.stdlib.get_logger(subsystem="command") log.debug("invoked", args=" ".join(sys.argv)) - command = Command(args.backupdir, log) + command = Command(args.backupdir, args.taskid, log) func = getattr(command, args.func) # Pass over to function @@ -486,6 +525,7 @@ def main(): del func_args["verbose"] del func_args["backupdir"] del func_args["logfile"] + del func_args["taskid"] try: log.debug("parsed", func=args.func, func_args=func_args) diff --git a/src/backy/scheduler.py b/src/backy/scheduler.py index 9c1e6f59..e2ed2987 100644 --- a/src/backy/scheduler.py +++ b/src/backy/scheduler.py @@ -17,7 +17,12 @@ from .backup import Backup from .ext_deps import BACKY_CMD -from .utils import SafeFile, format_datetime_local, time_or_event +from .utils import ( + SafeFile, + format_datetime_local, + generate_taskid, + time_or_event, +) class Job(object): @@ -35,6 +40,7 @@ class Job(object): run_immediately: asyncio.Event errors: int = 0 backoff: int = 0 + taskid: str = "" log: BoundLogger _task: Optional[asyncio.Task] = None @@ -146,7 +152,10 @@ async def run_forever(self): self.backoff = 0 self.log.debug("loop-started") while True: - self.backup.scan() + self.taskid = generate_taskid() + self.log = self.log.bind(job_name=self.name, sub_taskid=self.taskid) + + self.backup = Backup(self.path, self.log) next_time, next_tags = self.schedule.next( 
backy.utils.now(), self.spread, self.backup @@ -188,7 +197,6 @@ async def run_forever(self): async with self.daemon.backup_semaphores[speed]: self.update_status(f"running ({speed})") - self.update_config() await self.run_backup(next_tags) await self.pull_metadata() await self.run_expiry() @@ -218,13 +226,13 @@ async def run_forever(self): async def pull_metadata(self): try: - await self.backup.pull_metadata(self.daemon.peers) + await self.backup.pull_metadata(self.daemon.peers, self.taskid) except Exception: self.log.exception("pull-metadata-failed") async def push_metadata(self): try: - await self.backup.push_metadata(self.daemon.peers) + await self.backup.push_metadata(self.daemon.peers, self.taskid) except Exception: self.log.exception("push-metadata-failed") @@ -232,6 +240,8 @@ async def run_backup(self, tags): self.log.info("backup-started", tags=", ".join(tags)) proc = await asyncio.create_subprocess_exec( BACKY_CMD, + "-t", + self.taskid, "-b", str(self.path), "-l", @@ -267,6 +277,8 @@ async def run_expiry(self): self.log.info("expiry-started") proc = await asyncio.create_subprocess_exec( BACKY_CMD, + "-t", + self.taskid, "-b", self.path, "-l", @@ -301,6 +313,8 @@ async def run_purge(self): self.log.info("purge-started") proc = await asyncio.create_subprocess_exec( BACKY_CMD, + "-t", + self.taskid, "-b", str(self.path), "-l", diff --git a/src/backy/tests/test_client.py b/src/backy/tests/test_client.py index cb7d14e4..267f964d 100644 --- a/src/backy/tests/test_client.py +++ b/src/backy/tests/test_client.py @@ -15,9 +15,9 @@ from .test_daemon import daemon -@pytest.fixture(autouse=True) -def configure_logging(setup_structlog): - setup_structlog.default_job_name = "-" +@pytest.fixture +def log(log): + return log.bind(job_name="-") @pytest.fixture @@ -59,10 +59,12 @@ async def test_api_wrong_token(api, token, method, endpoint, aiohttp_client): async def api_client(api, aiohttp_client, log): client = await aiohttp_client( api.app, - headers={hdrs.AUTHORIZATION: 
"Bearer testtoken"}, + headers={hdrs.AUTHORIZATION: "Bearer testtoken", "taskid": "ABCD"}, raise_for_status=True, ) - api_client = APIClient("", "http://localhost:0", "", log) + api_client = APIClient( + "", "http://localhost:0", "token", "task", log + ) await api_client.session.close() api_client.session = client return api_client @@ -164,9 +166,10 @@ async def test_cli_run(daemon, cli_client, monkeypatch): assert ( Ellipsis( """\ -... D - api/new-conn path='/v1/jobs/test01/run' query='' -... D - api/auth-passed client='cli' path='/v1/jobs/test01/run' query='' -... D - api/request-result client='cli' path='/v1/jobs/test01/run' query='' status_code=202 +... D ~[ABCD] api/new-conn path='/v1/jobs/test01/run' query='' +... I ~cli[ABCD] api/get-job name='test01' +... I ~cli[ABCD] api/run-job name='test01' +... D ~cli[ABCD] api/request-result status_code=202 ... I - CLIClient/triggered-run job='test01' """ ) @@ -185,9 +188,10 @@ async def test_cli_run_missing(daemon, cli_client): assert ( Ellipsis( """\ -... D - api/new-conn path='/v1/jobs/aaaa/run' query='' -... D - api/auth-passed client='cli' path='/v1/jobs/aaaa/run' query='' -... D - api/request-result client='cli' path='/v1/jobs/aaaa/run' query='' status_code=404 +... D ~[ABCD] api/new-conn path='/v1/jobs/aaaa/run' query='' +... I ~cli[ABCD] api/get-job name='aaaa' +... I ~cli[ABCD] api/get-job-not-found name='aaaa' +... D ~cli[ABCD] api/request-result status_code=404 ... E - CLIClient/unknown-job job='aaaa' """ ) @@ -209,16 +213,18 @@ async def test_cli_runall(daemon, cli_client, monkeypatch): assert ( Ellipsis( """\ -... D - api/new-conn path='/v1/jobs' query='' -... D - api/auth-passed client='cli' path='/v1/jobs' query='' -... D - api/request-result client='cli' path='/v1/jobs' query='' response=... status_code=200 -... D - api/new-conn path='/v1/jobs/test01/run' query='' -... D - api/auth-passed client='cli' path='/v1/jobs/test01/run' query='' -... 
D - api/request-result client='cli' path='/v1/jobs/test01/run' query='' status_code=202 +... D ~[ABCD] api/new-conn path='/v1/jobs' query='' +... I ~cli[ABCD] api/get-jobs \n\ +... D ~cli[ABCD] api/request-result response=... status_code=200 +... D ~[ABCD] api/new-conn path='/v1/jobs/test01/run' query='' +... I ~cli[ABCD] api/get-job name='test01' +... I ~cli[ABCD] api/run-job name='test01' +... D ~cli[ABCD] api/request-result status_code=202 ... I - CLIClient/triggered-run job='test01' -... D - api/new-conn path='/v1/jobs/foo00/run' query='' -... D - api/auth-passed client='cli' path='/v1/jobs/foo00/run' query='' -... D - api/request-result client='cli' path='/v1/jobs/foo00/run' query='' status_code=202 +... D ~[ABCD] api/new-conn path='/v1/jobs/foo00/run' query='' +... I ~cli[ABCD] api/get-job name='foo00' +... I ~cli[ABCD] api/run-job name='foo00' +... D ~cli[ABCD] api/request-result status_code=202 ... I - CLIClient/triggered-run job='foo00' """ ) @@ -238,9 +244,9 @@ async def test_cli_reload(daemon, cli_client, monkeypatch): Ellipsis( """\ ... I - CLIClient/reloading-daemon \n\ -... D - api/new-conn path='/v1/reload' query='' -... D - api/auth-passed client='cli' path='/v1/reload' query='' -... D - api/request-result client='cli' path='/v1/reload' query='' status_code=204 +... D ~[ABCD] api/new-conn path='/v1/reload' query='' +... I ~cli[ABCD] api/reload-daemon \n\ +... D ~cli[ABCD] api/request-result status_code=204 ... I - CLIClient/reloaded-daemon \n\ """ ) @@ -257,9 +263,9 @@ async def test_cli_check_ok(daemon, cli_client): assert ( Ellipsis( """\ -... D - api/new-conn path='/v1/status' query='filter=' -... D - api/auth-passed client='cli' path='/v1/status' query='filter=' -... D - api/request-result client='cli' path='/v1/status' query='filter=' response=... status_code=200 +... D ~[ABCD] api/new-conn path='/v1/status' query='filter=' +... I ~cli[ABCD] api/get-status filter='' +... D ~cli[ABCD] api/request-result response=... status_code=200 ... 
I - CLIClient/check-exit exitcode=0 jobs=2 """ ) @@ -282,9 +288,9 @@ async def test_cli_check_too_old(daemon, clock, cli_client, log): assert ( Ellipsis( """\ -... D - api/new-conn path='/v1/status' query='filter=' -... D - api/auth-passed client='cli' path='/v1/status' query='filter=' -... D - api/request-result client='cli' path='/v1/status' query='filter=' response=... status_code=200 +... D ~[ABCD] api/new-conn path='/v1/status' query='filter=' +... I ~cli[ABCD] api/get-status filter='' +... D ~cli[ABCD] api/request-result response=... status_code=200 ... C test01 CLIClient/check-sla-violation last_time='2015-08-30 07:06:47+00:00' sla_overdue=172800.0 ... I - CLIClient/check-exit exitcode=2 jobs=2 """ @@ -307,9 +313,9 @@ async def test_cli_check_manual_tags(daemon, cli_client, log): assert ( Ellipsis( """\ -... D - api/new-conn path='/v1/status' query='filter=' -... D - api/auth-passed client='cli' path='/v1/status' query='filter=' -... D - api/request-result client='cli' path='/v1/status' query='filter=' response=... status_code=200 +... D ~[ABCD] api/new-conn path='/v1/status' query='filter=' +... I ~cli[ABCD] api/get-status filter='' +... D ~cli[ABCD] api/request-result response=... status_code=200 ... I test01 CLIClient/check-manual-tags manual_tags='manual:test' ... I - CLIClient/check-exit exitcode=0 jobs=2 """ @@ -330,9 +336,9 @@ async def test_cli_check_quarantine(daemon, cli_client, log): assert ( Ellipsis( """\ -... D - api/new-conn path='/v1/status' query='filter=' -... D - api/auth-passed client='cli' path='/v1/status' query='filter=' -... D - api/request-result client='cli' path='/v1/status' query='filter=' response=... status_code=200 +... D ~[ABCD] api/new-conn path='/v1/status' query='filter=' +... I ~cli[ABCD] api/get-status filter='' +... D ~cli[ABCD] api/request-result response=... status_code=200 ... W test01 CLIClient/check-quarantined reports=1 ... 
I - CLIClient/check-exit exitcode=1 jobs=2 """ diff --git a/src/backy/tests/test_daemon.py b/src/backy/tests/test_daemon.py index d176646e..7d2ee315 100644 --- a/src/backy/tests/test_daemon.py +++ b/src/backy/tests/test_daemon.py @@ -336,39 +336,44 @@ async def wait_for_job_finished(): assert ( Ellipsis( """\ -... D test01 job/loop-started \n\ -... I test01 job/waiting next_tags='daily' next_time='2015-09-02 07:32:51' -... E test01 job/exception exception_class='builtins.Exception' exception_msg='' +... D test01[...] job/loop-started \n\ +... D test01[...] quarantine/scan entries=0 +... I test01[...] job/waiting next_tags='daily' next_time='2015-09-02 07:32:51' +... E test01[...] job/exception exception_class='builtins.Exception' exception_msg='' exception>\tTraceback (most recent call last): exception>\t File "/.../src/backy/scheduler.py", line ..., in run_forever exception>\t await self.run_backup(next_tags) exception>\t File "/.../src/backy/tests/test_daemon.py", line ..., in failing_coroutine exception>\t raise Exception() exception>\tException -... W test01 job/backoff backoff=120 -... I test01 job/waiting next_tags='daily' next_time='2015-09-01 09:08:47' -... E test01 job/exception exception_class='builtins.Exception' exception_msg='' +... W test01[...] job/backoff backoff=120 +... D test01[...] quarantine/scan entries=0 +... I test01[...] job/waiting next_tags='daily' next_time='2015-09-01 09:08:47' +... E test01[...] job/exception exception_class='builtins.Exception' exception_msg='' exception>\tTraceback (most recent call last): exception>\t File "/.../src/backy/scheduler.py", line ..., in run_forever exception>\t await self.run_backup(next_tags) exception>\t File "/.../src/backy/tests/test_daemon.py", line ..., in failing_coroutine exception>\t raise Exception() exception>\tException -... W test01 job/backoff backoff=240 -... I test01 job/waiting next_tags='daily' next_time='2015-09-01 09:10:47' -... 
E test01 job/exception exception_class='builtins.Exception' exception_msg='' +... W test01[...] job/backoff backoff=240 +... D test01[...] quarantine/scan entries=0 +... I test01[...] job/waiting next_tags='daily' next_time='2015-09-01 09:10:47' +... E test01[...] job/exception exception_class='builtins.Exception' exception_msg='' exception>\tTraceback (most recent call last): exception>\t File "/.../src/backy/scheduler.py", line ..., in run_forever exception>\t await self.run_backup(next_tags) exception>\t File "/.../src/backy/tests/test_daemon.py", line ..., in failing_coroutine exception>\t raise Exception() exception>\tException -... W test01 job/backoff backoff=480 -... I test01 job/waiting next_tags='daily' next_time='2015-09-01 09:14:47' -... I test01 backup/pull-start \n\ -... I test01 backup/push-start \n\ -... I test01 job/stop \n\ -... I test01 job/waiting next_tags='daily' next_time='2015-09-02 07:32:51' +... W test01[...] job/backoff backoff=480 +... D test01[...] quarantine/scan entries=0 +... I test01[...] job/waiting next_tags='daily' next_time='2015-09-01 09:14:47' +... I test01[...] backup/pull-start \n\ +... I test01[...] backup/push-start \n\ +... I test01[...] job/stop \n\ +... D test01[...] quarantine/scan entries=0 +... I test01[...] job/waiting next_tags='daily' next_time='2015-09-02 07:32:51' """ ) == utils.log_data diff --git a/src/backy/tests/test_main.py b/src/backy/tests/test_main.py index 317fa57f..6ad480e4 100644 --- a/src/backy/tests/test_main.py +++ b/src/backy/tests/test_main.py @@ -28,7 +28,7 @@ def test_display_usage(capsys, argv): out, err = capsys.readouterr() assert ( """\ -usage: pytest [-h] [-v] [-l LOGFILE] [-b BACKUPDIR] +usage: pytest [-h] [-v] [-l LOGFILE] [-b BACKUPDIR] [-t TASKID] {client,backup,restore,purge,find,status,\ upgrade,scheduler,distrust,verify,forget,tags,expire} ... 
@@ -63,7 +63,7 @@ def test_display_help(capsys, argv): assert ( Ellipsis( """\ -usage: pytest [-h] [-v] [-l LOGFILE] [-b BACKUPDIR] +usage: pytest [-h] [-v] [-l LOGFILE] [-b BACKUPDIR] [-t TASKID] {client,backup,restore,purge,find,status,\ upgrade,scheduler,distrust,verify,forget,tags,expire} ... @@ -438,7 +438,7 @@ def do_raise(*args, **kw): def test_commands_wrapper_status( backup, tmp_path, capsys, clock, tz_berlin, log ): - commands = backy.main.Command(tmp_path, log) + commands = backy.main.Command(tmp_path, "AAAA", log) revision = Revision.create(backup, set(), log, uuid="1") revision.materialize() @@ -462,7 +462,7 @@ def test_commands_wrapper_status( def test_commands_wrapper_status_yaml( backup, tmp_path, capsys, clock, tz_berlin, log ): - commands = backy.main.Command(tmp_path, log) + commands = backy.main.Command(tmp_path, "AAAA", log) revision = Revision.create(backup, set(), log, uuid="1") revision.stats["duration"] = 3.5 diff --git a/src/backy/utils.py b/src/backy/utils.py index ad72f406..1eaafb53 100644 --- a/src/backy/utils.py +++ b/src/backy/utils.py @@ -1,4 +1,5 @@ import asyncio +import base64 import contextlib import datetime import hashlib @@ -481,6 +482,10 @@ def format_datetime_local(dt): ) +def generate_taskid(): + return base64.b32encode(random.randbytes(3)).decode("utf-8")[:4] + + def unique(iterable: Iterable[_T]) -> List[_T]: return list(dict.fromkeys(iterable)) From eb3df69355fdd18cf52bba006ce27aea99ac8302 Mon Sep 17 00:00:00 2001 From: Johann Bahl Date: Wed, 11 Oct 2023 15:18:19 +0200 Subject: [PATCH 03/13] Increase log event padding --- src/backy/backup.py | 2 +- src/backy/logging.py | 8 ++- src/backy/scheduler.py | 10 +--- src/backy/tests/test_client.py | 94 +++++++++++++++++----------------- src/backy/tests/test_daemon.py | 40 +++++++-------- src/backy/tests/test_main.py | 56 ++++++++++---------- 6 files changed, 105 insertions(+), 105 deletions(-) diff --git a/src/backy/backup.py b/src/backy/backup.py index cb2099b6..0abcd72e 100644 
--- a/src/backy/backup.py +++ b/src/backy/backup.py @@ -815,7 +815,7 @@ async def _pull_metadata(self, api: APIClient): log.warning("pull-error", exc_info=True) remote_revs = [] log.debug( - "pull-found-matching-server", + "pull-found-revs", revs=len(remote_revs), ) diff --git a/src/backy/logging.py b/src/backy/logging.py index 9e6d9b4f..91bc7e7d 100644 --- a/src/backy/logging.py +++ b/src/backy/logging.py @@ -12,13 +12,15 @@ import structlog from structlog.typing import EventDict, WrappedLogger +from backy import utils + try: import colorama except ImportError: colorama = None _MISSING = "{who} requires the {package} package installed." -_EVENT_WIDTH = 30 # pad the event name to so many characters +_EVENT_WIDTH = 35 # pad the event name to so many characters if sys.stderr.isatty() and colorama: COLORIZED_TTY_OUTPUT = True @@ -230,6 +232,10 @@ def write(line): + RESET_ALL + " " ) + if len(subsystem + event) > self._pad_event and hasattr( + utils, "log_data" + ): + raise RuntimeWarning("logline to long: " + subsystem + event) logger_name = event_dict.pop("logger", None) if logger_name is not None: diff --git a/src/backy/scheduler.py b/src/backy/scheduler.py index e2ed2987..7fcd4a89 100644 --- a/src/backy/scheduler.py +++ b/src/backy/scheduler.py @@ -225,16 +225,10 @@ async def run_forever(self): self.update_status("finished") async def pull_metadata(self): - try: - await self.backup.pull_metadata(self.daemon.peers, self.taskid) - except Exception: - self.log.exception("pull-metadata-failed") + await self.backup.pull_metadata(self.daemon.peers, self.taskid) async def push_metadata(self): - try: - await self.backup.push_metadata(self.daemon.peers, self.taskid) - except Exception: - self.log.exception("push-metadata-failed") + await self.backup.push_metadata(self.daemon.peers, self.taskid) async def run_backup(self, tags): self.log.info("backup-started", tags=", ".join(tags)) diff --git a/src/backy/tests/test_client.py b/src/backy/tests/test_client.py index 
267f964d..b96451b3 100644 --- a/src/backy/tests/test_client.py +++ b/src/backy/tests/test_client.py @@ -166,11 +166,11 @@ async def test_cli_run(daemon, cli_client, monkeypatch): assert ( Ellipsis( """\ -... D ~[ABCD] api/new-conn path='/v1/jobs/test01/run' query='' -... I ~cli[ABCD] api/get-job name='test01' -... I ~cli[ABCD] api/run-job name='test01' -... D ~cli[ABCD] api/request-result status_code=202 -... I - CLIClient/triggered-run job='test01' +... D ~[ABCD] api/new-conn path='/v1/jobs/test01/run' query='' +... I ~cli[ABCD] api/get-job name='test01' +... I ~cli[ABCD] api/run-job name='test01' +... D ~cli[ABCD] api/request-result status_code=202 +... I - CLIClient/triggered-run job='test01' """ ) == utils.log_data @@ -188,11 +188,11 @@ async def test_cli_run_missing(daemon, cli_client): assert ( Ellipsis( """\ -... D ~[ABCD] api/new-conn path='/v1/jobs/aaaa/run' query='' -... I ~cli[ABCD] api/get-job name='aaaa' -... I ~cli[ABCD] api/get-job-not-found name='aaaa' -... D ~cli[ABCD] api/request-result status_code=404 -... E - CLIClient/unknown-job job='aaaa' +... D ~[ABCD] api/new-conn path='/v1/jobs/aaaa/run' query='' +... I ~cli[ABCD] api/get-job name='aaaa' +... I ~cli[ABCD] api/get-job-not-found name='aaaa' +... D ~cli[ABCD] api/request-result status_code=404 +... E - CLIClient/unknown-job job='aaaa' """ ) == utils.log_data @@ -213,19 +213,19 @@ async def test_cli_runall(daemon, cli_client, monkeypatch): assert ( Ellipsis( """\ -... D ~[ABCD] api/new-conn path='/v1/jobs' query='' -... I ~cli[ABCD] api/get-jobs \n\ -... D ~cli[ABCD] api/request-result response=... status_code=200 -... D ~[ABCD] api/new-conn path='/v1/jobs/test01/run' query='' -... I ~cli[ABCD] api/get-job name='test01' -... I ~cli[ABCD] api/run-job name='test01' -... D ~cli[ABCD] api/request-result status_code=202 -... I - CLIClient/triggered-run job='test01' -... D ~[ABCD] api/new-conn path='/v1/jobs/foo00/run' query='' -... I ~cli[ABCD] api/get-job name='foo00' -... 
I ~cli[ABCD] api/run-job name='foo00' -... D ~cli[ABCD] api/request-result status_code=202 -... I - CLIClient/triggered-run job='foo00' +... D ~[ABCD] api/new-conn path='/v1/jobs' query='' +... I ~cli[ABCD] api/get-jobs \n\ +... D ~cli[ABCD] api/request-result response=... status_code=200 +... D ~[ABCD] api/new-conn path='/v1/jobs/test01/run' query='' +... I ~cli[ABCD] api/get-job name='test01' +... I ~cli[ABCD] api/run-job name='test01' +... D ~cli[ABCD] api/request-result status_code=202 +... I - CLIClient/triggered-run job='test01' +... D ~[ABCD] api/new-conn path='/v1/jobs/foo00/run' query='' +... I ~cli[ABCD] api/get-job name='foo00' +... I ~cli[ABCD] api/run-job name='foo00' +... D ~cli[ABCD] api/request-result status_code=202 +... I - CLIClient/triggered-run job='foo00' """ ) == utils.log_data @@ -243,11 +243,11 @@ async def test_cli_reload(daemon, cli_client, monkeypatch): assert ( Ellipsis( """\ -... I - CLIClient/reloading-daemon \n\ -... D ~[ABCD] api/new-conn path='/v1/reload' query='' -... I ~cli[ABCD] api/reload-daemon \n\ -... D ~cli[ABCD] api/request-result status_code=204 -... I - CLIClient/reloaded-daemon \n\ +... I - CLIClient/reloading-daemon \n\ +... D ~[ABCD] api/new-conn path='/v1/reload' query='' +... I ~cli[ABCD] api/reload-daemon \n\ +... D ~cli[ABCD] api/request-result status_code=204 +... I - CLIClient/reloaded-daemon \n\ """ ) == utils.log_data @@ -263,10 +263,10 @@ async def test_cli_check_ok(daemon, cli_client): assert ( Ellipsis( """\ -... D ~[ABCD] api/new-conn path='/v1/status' query='filter=' -... I ~cli[ABCD] api/get-status filter='' -... D ~cli[ABCD] api/request-result response=... status_code=200 -... I - CLIClient/check-exit exitcode=0 jobs=2 +... D ~[ABCD] api/new-conn path='/v1/status' query='filter=' +... I ~cli[ABCD] api/get-status filter='' +... D ~cli[ABCD] api/request-result response=... status_code=200 +... 
I - CLIClient/check-exit exitcode=0 jobs=2 """ ) == utils.log_data @@ -288,11 +288,11 @@ async def test_cli_check_too_old(daemon, clock, cli_client, log): assert ( Ellipsis( """\ -... D ~[ABCD] api/new-conn path='/v1/status' query='filter=' -... I ~cli[ABCD] api/get-status filter='' -... D ~cli[ABCD] api/request-result response=... status_code=200 -... C test01 CLIClient/check-sla-violation last_time='2015-08-30 07:06:47+00:00' sla_overdue=172800.0 -... I - CLIClient/check-exit exitcode=2 jobs=2 +... D ~[ABCD] api/new-conn path='/v1/status' query='filter=' +... I ~cli[ABCD] api/get-status filter='' +... D ~cli[ABCD] api/request-result response=... status_code=200 +... C test01 CLIClient/check-sla-violation last_time='2015-08-30 07:06:47+00:00' sla_overdue=172800.0 +... I - CLIClient/check-exit exitcode=2 jobs=2 """ ) == utils.log_data @@ -313,11 +313,11 @@ async def test_cli_check_manual_tags(daemon, cli_client, log): assert ( Ellipsis( """\ -... D ~[ABCD] api/new-conn path='/v1/status' query='filter=' -... I ~cli[ABCD] api/get-status filter='' -... D ~cli[ABCD] api/request-result response=... status_code=200 -... I test01 CLIClient/check-manual-tags manual_tags='manual:test' -... I - CLIClient/check-exit exitcode=0 jobs=2 +... D ~[ABCD] api/new-conn path='/v1/status' query='filter=' +... I ~cli[ABCD] api/get-status filter='' +... D ~cli[ABCD] api/request-result response=... status_code=200 +... I test01 CLIClient/check-manual-tags manual_tags='manual:test' +... I - CLIClient/check-exit exitcode=0 jobs=2 """ ) == utils.log_data @@ -336,11 +336,11 @@ async def test_cli_check_quarantine(daemon, cli_client, log): assert ( Ellipsis( """\ -... D ~[ABCD] api/new-conn path='/v1/status' query='filter=' -... I ~cli[ABCD] api/get-status filter='' -... D ~cli[ABCD] api/request-result response=... status_code=200 -... W test01 CLIClient/check-quarantined reports=1 -... I - CLIClient/check-exit exitcode=1 jobs=2 +... D ~[ABCD] api/new-conn path='/v1/status' query='filter=' +... 
I ~cli[ABCD] api/get-status filter='' +... D ~cli[ABCD] api/request-result response=... status_code=200 +... W test01 CLIClient/check-quarantined reports=1 +... I - CLIClient/check-exit exitcode=1 jobs=2 """ ) == utils.log_data diff --git a/src/backy/tests/test_daemon.py b/src/backy/tests/test_daemon.py index 7d2ee315..bd8eb0d2 100644 --- a/src/backy/tests/test_daemon.py +++ b/src/backy/tests/test_daemon.py @@ -336,44 +336,44 @@ async def wait_for_job_finished(): assert ( Ellipsis( """\ -... D test01[...] job/loop-started \n\ -... D test01[...] quarantine/scan entries=0 -... I test01[...] job/waiting next_tags='daily' next_time='2015-09-02 07:32:51' -... E test01[...] job/exception exception_class='builtins.Exception' exception_msg='' +... D test01[...] job/loop-started \n\ +... D test01[...] quarantine/scan entries=0 +... I test01[...] job/waiting next_tags='daily' next_time='2015-09-02 07:32:51' +... E test01[...] job/exception exception_class='builtins.Exception' exception_msg='' exception>\tTraceback (most recent call last): exception>\t File "/.../src/backy/scheduler.py", line ..., in run_forever exception>\t await self.run_backup(next_tags) exception>\t File "/.../src/backy/tests/test_daemon.py", line ..., in failing_coroutine exception>\t raise Exception() exception>\tException -... W test01[...] job/backoff backoff=120 -... D test01[...] quarantine/scan entries=0 -... I test01[...] job/waiting next_tags='daily' next_time='2015-09-01 09:08:47' -... E test01[...] job/exception exception_class='builtins.Exception' exception_msg='' +... W test01[...] job/backoff backoff=120 +... D test01[...] quarantine/scan entries=0 +... I test01[...] job/waiting next_tags='daily' next_time='2015-09-01 09:08:47' +... E test01[...] 
job/exception exception_class='builtins.Exception' exception_msg='' exception>\tTraceback (most recent call last): exception>\t File "/.../src/backy/scheduler.py", line ..., in run_forever exception>\t await self.run_backup(next_tags) exception>\t File "/.../src/backy/tests/test_daemon.py", line ..., in failing_coroutine exception>\t raise Exception() exception>\tException -... W test01[...] job/backoff backoff=240 -... D test01[...] quarantine/scan entries=0 -... I test01[...] job/waiting next_tags='daily' next_time='2015-09-01 09:10:47' -... E test01[...] job/exception exception_class='builtins.Exception' exception_msg='' +... W test01[...] job/backoff backoff=240 +... D test01[...] quarantine/scan entries=0 +... I test01[...] job/waiting next_tags='daily' next_time='2015-09-01 09:10:47' +... E test01[...] job/exception exception_class='builtins.Exception' exception_msg='' exception>\tTraceback (most recent call last): exception>\t File "/.../src/backy/scheduler.py", line ..., in run_forever exception>\t await self.run_backup(next_tags) exception>\t File "/.../src/backy/tests/test_daemon.py", line ..., in failing_coroutine exception>\t raise Exception() exception>\tException -... W test01[...] job/backoff backoff=480 -... D test01[...] quarantine/scan entries=0 -... I test01[...] job/waiting next_tags='daily' next_time='2015-09-01 09:14:47' -... I test01[...] backup/pull-start \n\ -... I test01[...] backup/push-start \n\ -... I test01[...] job/stop \n\ -... D test01[...] quarantine/scan entries=0 -... I test01[...] job/waiting next_tags='daily' next_time='2015-09-02 07:32:51' +... W test01[...] job/backoff backoff=480 +... D test01[...] quarantine/scan entries=0 +... I test01[...] job/waiting next_tags='daily' next_time='2015-09-01 09:14:47' +... I test01[...] backup/pull-start \n\ +... I test01[...] backup/push-start \n\ +... I test01[...] job/stop \n\ +... D test01[...] quarantine/scan entries=0 +... I test01[...] 
job/waiting next_tags='daily' next_time='2015-09-02 07:32:51' """ ) == utils.log_data diff --git a/src/backy/tests/test_main.py b/src/backy/tests/test_main.py index 6ad480e4..a4317cd0 100644 --- a/src/backy/tests/test_main.py +++ b/src/backy/tests/test_main.py @@ -138,9 +138,9 @@ def test_call_status(capsys, backup, argv, monkeypatch): assert ( Ellipsis( """\ -... D command/invoked args='... -v -b ... status' -... D command/parsed func='status' func_args={'yaml_': False, 'revision': 'all'} -... D command/successful \n\ +... D command/invoked args='... -v -b ... status' +... D command/parsed func='status' func_args={'yaml_': False, 'revision': 'all'} +... D command/successful \n\ """ ) == utils.log_data @@ -187,10 +187,10 @@ def test_call_backup(tmp_path, capsys, argv, monkeypatch): assert ( Ellipsis( """\ -... D command/invoked args='... -v backup manual:test' -... D command/parsed func='backup' func_args={'force': False, 'tags': 'manual:test'} -... D quarantine/scan entries=0 -... D command/successful \n\ +... D command/invoked args='... -v backup manual:test' +... D command/parsed func='backup' func_args={'force': False, 'tags': 'manual:test'} +... D quarantine/scan entries=0 +... D command/successful \n\ """ ) == utils.log_data @@ -218,9 +218,9 @@ def test_call_find(capsys, backup, argv, monkeypatch): assert ( Ellipsis( """\ -... D command/invoked args='... -v -b ... find -r 1' -... D command/parsed func='find' func_args={'uuid': False, 'revision': '1'} -... D command/successful \n\ +... D command/invoked args='... -v -b ... find -r 1' +... D command/parsed func='find' func_args={'uuid': False, 'revision': '1'} +... D command/successful \n\ """ ) == utils.log_data @@ -278,11 +278,11 @@ def test_call_client( assert ( Ellipsis( f"""\ -... D command/invoked args='... -v client -c ... {action}{" "*bool(args)}{", ".join(args.values())}' -... D command/parsed func='client' func_args={{'config': PosixPath('...'), 'peer': None, \ +... D command/invoked args='... 
-v client -c ... {action}{" "*bool(args)}{", ".join(args.values())}' +... D command/parsed func='client' func_args={{'config': PosixPath('...'), 'peer': None, \ 'url': None, 'token': None{", "*bool(args)}{str(args)[1:-1]}, 'apifunc': '{action}'}} -... D daemon/read-config ... -... D command/successful \n\ +... D daemon/read-config ... +... D command/successful \n\ """ ) == utils.log_data @@ -319,9 +319,9 @@ def test_call_scheduler(capsys, backup, argv, monkeypatch, tmp_path): assert ( Ellipsis( """\ -... D command/invoked args='... -v -b ... scheduler' -... D command/parsed func='scheduler' func_args={'config': PosixPath('/etc/backy.conf')} -... D command/successful \n\ +... D command/invoked args='... -v -b ... scheduler' +... D command/parsed func='scheduler' func_args={'config': PosixPath('/etc/backy.conf')} +... D command/successful \n\ """ ) == utils.log_data @@ -356,11 +356,11 @@ def test_call_tags(capsys, backup, argv, monkeypatch, action): assert ( Ellipsis( f"""\ -... D quarantine/scan entries=0 -... D command/invoked args='... -v -b ... tags {action} -r last manual:a' -... D command/parsed func='tags' func_args={{'autoremove': False, 'force': False, 'expect': None, \ +... D quarantine/scan entries=0 +... D command/invoked args='... -v -b ... tags {action} -r last manual:a' +... D command/parsed func='tags' func_args={{'autoremove': False, 'force': False, 'expect': None, \ 'action': '{action}', 'revision': 'last', 'tags': 'manual:a'}} -... D command/successful \n\ +... D command/successful \n\ """ ) == utils.log_data @@ -387,10 +387,10 @@ def test_call_expire(capsys, backup, argv, monkeypatch): assert ( Ellipsis( """\ -... D quarantine/scan entries=0 -... D command/invoked args='... -v -b ... expire' -... D command/parsed func='expire' func_args={} -... D command/successful \n\ +... D quarantine/scan entries=0 +... D command/invoked args='... -v -b ... expire' +... D command/parsed func='expire' func_args={} +... 
D command/successful \n\ """ ) == utils.log_data @@ -420,9 +420,9 @@ def do_raise(*args, **kw): assert ( Ellipsis( """\ -... D command/invoked args='... -l ... -b ... status' -... D command/parsed func='status' func_args={'yaml_': False, 'revision': 'all'} -... E command/failed exception_class='builtins.RuntimeError' exception_msg='test' +... D command/invoked args='... -l ... -b ... status' +... D command/parsed func='status' func_args={'yaml_': False, 'revision': 'all'} +... E command/failed exception_class='builtins.RuntimeError' exception_msg='test' exception>\tTraceback (most recent call last): exception>\t File ".../src/backy/main.py", line ..., in main exception>\t func(**func_args) From 4a6000bf98fc86881d5331a841cb2f39bfbbb6b8 Mon Sep 17 00:00:00 2001 From: Johann Bahl Date: Thu, 16 Nov 2023 15:11:38 +0100 Subject: [PATCH 04/13] rename location to server Not adding migrations for rev files as there was no release between the introduction of locations and this commit --- src/backy/api.py | 2 +- src/backy/backup.py | 14 ++++++---- src/backy/client.py | 2 +- src/backy/main.py | 4 +-- src/backy/revision.py | 12 ++++---- src/backy/tests/test_api.py | 4 +-- src/backy/tests/test_backy.py | 48 ++++++++++++++++---------------- src/backy/tests/test_daemon.py | 2 +- src/backy/tests/test_main.py | 12 ++++---- src/backy/tests/test_revision.py | 4 +-- 10 files changed, 54 insertions(+), 50 deletions(-) diff --git a/src/backy/api.py b/src/backy/api.py index 5d0ebccb..5ba7e498 100644 --- a/src/backy/api.py +++ b/src/backy/api.py @@ -214,7 +214,7 @@ async def get_revs(self, request: web.Request) -> List[Revision]: revs = backup.clean_history else: revs = backup.history - return [r for r in revs if not r.location] + return [r for r in revs if not r.server] async def put_tags(self, request: web.Request): json = await request.json() diff --git a/src/backy/backup.py b/src/backy/backup.py index 0abcd72e..d92e220d 100644 --- a/src/backy/backup.py +++ b/src/backy/backup.py @@ 
-725,7 +725,7 @@ async def push_metadata(self, peers, taskid: str): grouped = defaultdict(list) for r in self.history: if r.pending_changes: - grouped[r.location].append(r) + grouped[r.server].append(r) self.log.info( "push-start", changes=sum(len(l) for l in grouped.values()) ) @@ -742,7 +742,7 @@ async def _push_metadata(self, api: APIClient, revs: List[Revision]): purge_required = False for r in revs: log = self.log.bind( - server=r.location, + server=r.server, rev_uuid=r.uuid, ) log.debug( @@ -781,8 +781,12 @@ async def _push_metadata(self, api: APIClient, revs: List[Revision]): async def pull_metadata(self, peers: dict, taskid: str): async def remove_dead_peer(): for r in list(self.history): - if r.location and r.location not in peers: - self.log.info("pull-removing-dead-peer", rev_uuid=r.uuid) + if r.server and r.server not in peers: + self.log.info( + "pull-removing-dead-peer", + rev_uuid=r.uuid, + server=r.server, + ) r.remove(force=True) self.log.info("pull-start") @@ -820,7 +824,7 @@ async def _pull_metadata(self, api: APIClient): ) matching_uuids = { - r.uuid for r in self.history if r.location == api.server_name + r.uuid for r in self.history if r.server == api.server_name } remote_uuids = {r.uuid for r in remote_revs} for uuid in matching_uuids - remote_uuids: diff --git a/src/backy/client.py b/src/backy/client.py index cd2e2db5..7ce69963 100644 --- a/src/backy/client.py +++ b/src/backy/client.py @@ -141,7 +141,7 @@ async def get_revs( for r in revs: r.backend_type = "" r.orig_tags = r.tags - r.location = self.server_name + r.server = self.server_name return revs async def put_tags(self, rev: Revision, autoremove: bool = False): diff --git a/src/backy/main.py b/src/backy/main.py index 552dd0e4..f95b4e7f 100644 --- a/src/backy/main.py +++ b/src/backy/main.py @@ -51,7 +51,7 @@ def status(self, yaml_: bool, revision: str) -> None: Column("Duration", justify="right"), "Tags", "Trust", - "Location", + "Server", ) for r in revs: @@ -71,7 +71,7 @@ def 
status(self, yaml_: bool, revision: str) -> None: duration, ",".join(r.tags), r.trust.value, - r.location, + r.server, ) rprint(t) diff --git a/src/backy/revision.py b/src/backy/revision.py index 98da82a5..2ed90300 100644 --- a/src/backy/revision.py +++ b/src/backy/revision.py @@ -42,7 +42,7 @@ class Revision(object): orig_tags: set[str] trust: Trust = Trust.TRUSTED backend_type: Literal["cowfile", "chunked"] = "chunked" - location: str = "" + server: str = "" log: BoundLogger def __init__( @@ -95,7 +95,7 @@ def from_dict(cls, metadata, backup, log): r.stats = metadata.get("stats", {}) r.tags = set(metadata.get("tags", [])) r.orig_tags = set(metadata.get("orig_tags", [])) - r.location = metadata.get("location", "") + r.server = metadata.get("server", "") # Assume trusted by default to support migration r.trust = Trust(metadata.get("trust", Trust.TRUSTED.value)) # If the metadata does not show the backend type, then it's cowfile. @@ -135,12 +135,12 @@ def to_dict(self) -> dict: "trust": self.trust.value, "tags": list(self.tags), "orig_tags": list(self.orig_tags), - "location": self.location, + "server": self.server, } @property def pending_changes(self): - return self.location and self.tags != self.orig_tags + return self.server and self.tags != self.orig_tags def distrust(self) -> None: self.log.info("distrusted") @@ -152,8 +152,8 @@ def verify(self) -> None: def remove(self, force=False) -> None: self.log.info("remove") - if not force and self.location: - self.log.debug("remove-remote", location=self.location) + if not force and self.server: + self.log.debug("remove-remote", server=self.server) self.tags = set() self.write_info() else: diff --git a/src/backy/tests/test_api.py b/src/backy/tests/test_api.py index 6d1c45b7..7c61b847 100644 --- a/src/backy/tests/test_api.py +++ b/src/backy/tests/test_api.py @@ -138,7 +138,7 @@ async def test_remove_peer(daemons, log): assert [r.uuid for r in b0.history] == [rev0.uuid] - rev0.location = "unknown" + rev0.server = 
"unknown" rev0.materialize() b0.scan() @@ -206,7 +206,7 @@ async def test_simple_sync(daemons, log): assert new_rev1.tags == rev1.tags assert new_rev1.orig_tags == new_rev1.tags assert new_rev1.trust == rev1.trust - assert new_rev1.location == "server-1" + assert new_rev1.server == "server-1" new_rev1.remove() assert [r.uuid for r in b0.history] == [rev0.uuid, rev1.uuid] diff --git a/src/backy/tests/test_backy.py b/src/backy/tests/test_backy.py index a76b650e..5a1065c5 100644 --- a/src/backy/tests/test_backy.py +++ b/src/backy/tests/test_backy.py @@ -141,31 +141,31 @@ def test_smoketest_external(): Diffing restore_state2.img against img_state2.img. Success. Restoring img_state1.img from level 3. Done. Diffing restore_state1.img against img_state1.img. Success. -┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━┓ -┃ Date ┃ ┃ ┃ ┃ ┃ ┃ ┃ -┃ ... ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ Location ┃ -┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━┩ -│ ... │ ... │ 512.0 KiB │ a moment │ manual:… │ trusted │ │ -│ ... │ │ │ │ │ │ │ -│ ... │ ... │ 512.0 KiB │ a moment │ daily │ trusted │ │ -│ ... │ │ │ │ │ │ │ -│ ... │ ... │ 512.0 KiB │ a moment │ test │ trusted │ │ -│ ... │ │ │ │ │ │ │ -│ ... │ ... │ 512.0 KiB │ a moment │ manual:… │ trusted │ │ -│ ... │ │ │ │ │ │ │ -└───────────┴───────────┴───────────┴──────────┴──────────┴─────────┴──────────┘ +┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━┓ +┃ Date ┃ ┃ ┃ ┃ ┃ ┃ ┃ +┃ ... ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ Server ┃ +┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━┩ +│ ... │ ... │ 512.0 KiB │ a moment │ manual:te… │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... │ 512.0 KiB │ a moment │ daily │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... │ 512.0 KiB │ a moment │ test │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... │ 512.0 KiB │ a moment │ manual:te… │ trusted │ │ +│ ... 
│ │ │ │ │ │ │ +└───────────┴───────────┴───────────┴──────────┴────────────┴─────────┴────────┘ 4 revisions containing 2.0 MiB data (estimated) -┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━┓ -┃ Date ┃ ┃ ┃ ┃ ┃ ┃ ┃ -┃ ... ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ Location ┃ -┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━┩ -│ ... │ ... │ 512.0 KiB │ a moment │ manual:… │ trusted │ │ -│ ... │ │ │ │ │ │ │ -│ ... │ ... │ 512.0 KiB │ a moment │ test │ trusted │ │ -│ ... │ │ │ │ │ │ │ -│ ... │ ... │ 512.0 KiB │ a moment │ manual:… │ trusted │ │ -│ ... │ │ │ │ │ │ │ -└───────────┴───────────┴───────────┴──────────┴──────────┴─────────┴──────────┘ +┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━┓ +┃ Date ┃ ┃ ┃ ┃ ┃ ┃ ┃ +┃ ... ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ Server ┃ +┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━┩ +│ ... │ ... │ 512.0 KiB │ a moment │ manual:te… │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... │ 512.0 KiB │ a moment │ test │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... │ 512.0 KiB │ a moment │ manual:te… │ trusted │ │ +│ ... 
│ │ │ │ │ │ │ +└───────────┴───────────┴───────────┴──────────┴────────────┴─────────┴────────┘ 3 revisions containing 1.5 MiB data (estimated) """ ) diff --git a/src/backy/tests/test_daemon.py b/src/backy/tests/test_daemon.py index bd8eb0d2..8db848b7 100644 --- a/src/backy/tests/test_daemon.py +++ b/src/backy/tests/test_daemon.py @@ -167,7 +167,7 @@ async def test_run_callback(daemon, log): assert isinstance(r["tags"][0], str) assert isinstance(r["stats"]["bytes_written"], int) assert isinstance(r["stats"]["duration"], float) - assert isinstance(r["location"], str) + assert isinstance(r["server"], str) def test_spread(daemon): diff --git a/src/backy/tests/test_main.py b/src/backy/tests/test_main.py index a4317cd0..7663012f 100644 --- a/src/backy/tests/test_main.py +++ b/src/backy/tests/test_main.py @@ -449,11 +449,11 @@ def test_commands_wrapper_status( assert err == "" assert out == Ellipsis( """\ -┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━┳━━━━━━━━━┳━━━━━━━━━━┳━━━━━━┳━━━━━━━━━┳━━━━━━━━━━┓ -┃ Date (...) ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ Location ┃ -┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━╇━━━━━━━━━╇━━━━━━━━━━╇━━━━━━╇━━━━━━━━━╇━━━━━━━━━━┩ -│ ... │ 1 │ 0 Bytes │ - │ │ trusted │ │ -└──────────────────────┴────┴─────────┴──────────┴──────┴─────────┴──────────┘ +┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━┳━━━━━━━━━┳━━━━━━━━━━┳━━━━━━┳━━━━━━━━━┳━━━━━━━━┓ +┃ Date (...) ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ Server ┃ +┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━╇━━━━━━━━━╇━━━━━━━━━━╇━━━━━━╇━━━━━━━━━╇━━━━━━━━┩ +│ ... 
│ 1 │ 0 Bytes │ - │ │ trusted │ │ +└──────────────────────┴────┴─────────┴──────────┴──────┴─────────┴────────┘ 1 revisions containing 0 Bytes data (estimated) """ ) @@ -477,9 +477,9 @@ def test_commands_wrapper_status_yaml( out == f"""\ - backend_type: {backup.default_backend_type} - location: '' orig_tags: [] parent: '' + server: '' stats: bytes_written: 42 duration: 3.5 diff --git a/src/backy/tests/test_revision.py b/src/backy/tests/test_revision.py index c5fb8b9b..498a6e7b 100644 --- a/src/backy/tests/test_revision.py +++ b/src/backy/tests/test_revision.py @@ -72,7 +72,7 @@ def test_store_revision_data(backup, clock, log): "stats": {"bytes_written": 0}, "tags": [], "orig_tags": [], - "location": "", + "server": "", "trust": "trusted", "timestamp": datetime.datetime(2015, 9, 1, 7, 6, 47, tzinfo=UTC), } @@ -89,7 +89,7 @@ def test_store_revision_data_no_parent(backup, clock, log): "stats": {"bytes_written": 0}, "tags": [], "orig_tags": [], - "location": "", + "server": "", "trust": "trusted", "timestamp": datetime.datetime(2015, 9, 1, 7, 6, 47, tzinfo=UTC), } From f5c77343c86c94f318c3b42fc6b6452fe5a59d37 Mon Sep 17 00:00:00 2001 From: Johann Bahl Date: Thu, 16 Nov 2023 21:48:17 +0100 Subject: [PATCH 05/13] use aiofiles --- .pre-commit-config.yaml | 1 + lib.nix | 3 +++ poetry.lock | 24 ++++++++++++++++++++++- pyproject.toml | 2 ++ src/backy/api.py | 2 +- src/backy/daemon.py | 36 ++++++++++++++++------------------ src/backy/tests/test_daemon.py | 10 ++++++++-- src/backy/utils.py | 21 +++++++++++++------- 8 files changed, 69 insertions(+), 30 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9eb2f4f5..b093fc4c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -32,6 +32,7 @@ repos: - types-PyYAML==5.4.0 - types-setuptools - types-tzlocal==4.2 + - types-aiofiles==23.2 exclude: tests args: - --check-untyped-defs diff --git a/lib.nix b/lib.nix index a8568522..56204c2a 100644 --- a/lib.nix +++ b/lib.nix @@ -48,6 +48,9 @@ 
let # replace poetry to avoid dependency on vulnerable python-cryptography package nativeBuildInputs = [ super.poetry-core ] ++ builtins.filter (p: p.pname or "" != "poetry") old.nativeBuildInputs; }); + aiofiles = super.aiofiles.overrideAttrs (old: { + buildInputs = (old.buildInputs or []) ++ [ super.hatchling super.hatch-vcs ]; + }); nh3 = let getCargoHash = version: { diff --git a/poetry.lock b/poetry.lock index ba3285f3..a29ce684 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,16 @@ # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +[[package]] +name = "aiofiles" +version = "23.2.1" +description = "File support for asyncio." +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiofiles-23.2.1-py3-none-any.whl", hash = "sha256:19297512c647d4b27a2cf7c34caa7e405c0d60b5560618a29a9fe027b18b0107"}, + {file = "aiofiles-23.2.1.tar.gz", hash = "sha256:84ec2218d8419404abcb9f0c02df3f34c6e0a68ed41072acfb1cef5cbc29051a"}, +] + [[package]] name = "aiohttp" version = "3.9.1" @@ -96,6 +107,17 @@ yarl = ">=1.0,<2.0" [package.extras] speedups = ["Brotli", "aiodns", "brotlicffi"] +[[package]] +name = "aioshutil" +version = "1.3" +description = "Asynchronous shutil module." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "aioshutil-1.3-py3-none-any.whl", hash = "sha256:a441c99ef5f9b77fdd429ea7d043b8a358aa9b0c87043868113f9790c9aea400"}, + {file = "aioshutil-1.3.tar.gz", hash = "sha256:ddabe1748c8a71ec3c7d213a4d0cf58fb495c71419334a0575b1f8a3be8373e8"}, +] + [[package]] name = "aiosignal" version = "1.3.1" @@ -1921,4 +1943,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "~3.10" -content-hash = "296d5688b57d223d716e54617bf97f878d3014d2336e2fbc96a5795aeff885e5" +content-hash = "8af2b0b4beb7499d0d3a541c58a6ece2c2f84cb6b482e9e88ec04fbfe97c8a02" diff --git a/pyproject.toml b/pyproject.toml index bc78d049..3a9af314 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,6 +56,8 @@ aiohttp = "^3.8.4" rich = "^13.3.2" yarl = "1.9.2" frozenlist = "1.4.0" +aiofiles = "^23.2.1" +aioshutil = "^1.3" [tool.poetry.dev-dependencies] pre-commit = "^3.3.3" diff --git a/src/backy/api.py b/src/backy/api.py index 5ba7e498..b3a281c2 100644 --- a/src/backy/api.py +++ b/src/backy/api.py @@ -176,7 +176,7 @@ async def run_job(self, request: web.Request): async def list_backups(self, request: web.Request) -> List[str]: request["log"].info("list-backups") - return self.daemon.find_dead_backups() + return await self.daemon.find_dead_backups() async def get_backup(self, request: web.Request) -> Backup: name = request.match_info.get("backup_name", None) diff --git a/src/backy/daemon.py b/src/backy/daemon.py index c1b9cfe9..c8c5ef79 100644 --- a/src/backy/daemon.py +++ b/src/backy/daemon.py @@ -1,13 +1,15 @@ import asyncio import fcntl import os -import shutil +import os.path as p import signal import sys import time from pathlib import Path from typing import IO, List, Optional, Pattern +import aiofiles.os as aos +import aioshutil import yaml from structlog.stdlib import BoundLogger @@ -15,7 +17,7 @@ from .revision import filter_manual_tags from .schedule import 
Schedule from .scheduler import Job -from .utils import has_recent_changes +from .utils import has_recent_changes, is_dir_no_symlink daemon: "BackyDaemon" @@ -301,37 +303,33 @@ def status(self, filter_re: Optional[Pattern[str]] = None) -> List[dict]: return result async def purge_old_files(self): - # `stat` and other file system access things are _not_ - # properly async, we might want to spawn those off into a separate - # thread. while True: try: self.log.info("purge-scanning") - for candidate in os.scandir(self.base_dir): - if not candidate.is_dir(follow_symlinks=False): + for candidate in await aos.scandir(self.base_dir): + if not await is_dir_no_symlink(candidate.path): continue self.log.debug("purge-candidate", candidate=candidate.path) reference_time = time.time() - 3 * 31 * 24 * 60 * 60 - if not has_recent_changes(candidate, reference_time): + if not await has_recent_changes( + candidate.path, reference_time + ): self.log.info("purging", candidate=candidate.path) - shutil.rmtree(candidate) + await aioshutil.rmtree(candidate) self.log.info("purge-finished") except Exception: self.log.exception("purge") await asyncio.sleep(24 * 60 * 60) async def purge_pending_backups(self): - # `stat` and other file system access things are _not_ - # properly async, we might want to spawn those off into a separate - # thread. 
while True: try: self.log.info("purge-pending-scanning") - for candidate in os.scandir(self.base_dir): + for candidate in await aos.scandir(self.base_dir): if ( - not candidate.is_dir(follow_symlinks=False) - or candidate.name in self.jobs # will get purged anyway - or not p.exists( + candidate.name in self.jobs # will get purged anyway + or not await is_dir_no_symlink(candidate.path) + or not await aos.path.exists( p.join(candidate.path, ".purge_pending") ) ): @@ -343,12 +341,12 @@ async def purge_pending_backups(self): self.log.exception("purge-pending") await asyncio.sleep(24 * 60 * 60) - def find_dead_backups(self) -> List[str]: + async def find_dead_backups(self) -> List[str]: self.log.debug("scanning-backups") return [ b.name - for b in os.scandir(self.base_dir) - if b.is_dir(follow_symlinks=False) and b.name not in self.jobs + for b in await aos.scandir(self.base_dir) + if await is_dir_no_symlink(b.path) and b.name not in self.jobs ] diff --git a/src/backy/tests/test_daemon.py b/src/backy/tests/test_daemon.py index 8db848b7..9a6934ae 100644 --- a/src/backy/tests/test_daemon.py +++ b/src/backy/tests/test_daemon.py @@ -18,7 +18,7 @@ @pytest.fixture -async def daemon(tmp_path, log): +async def daemon(tmp_path, monkeypatch, log): daemon = BackyDaemon(tmp_path / "config", log) source = str(tmp_path / "test01.source") with open(str(tmp_path / "config"), "w") as f: @@ -57,7 +57,13 @@ async def daemon(tmp_path, log): tmp_path.joinpath("dead01").mkdir() - daemon.start(asyncio.get_running_loop()) + async def null_coroutine(): + return + + with monkeypatch.context() as m: + m.setattr(daemon, "purge_old_files", null_coroutine) + m.setattr(daemon, "purge_pending_backups", null_coroutine) + daemon.start(asyncio.get_running_loop()) yield daemon daemon.terminate() diff --git a/src/backy/utils.py b/src/backy/utils.py index 1eaafb53..e6c58be3 100644 --- a/src/backy/utils.py +++ b/src/backy/utils.py @@ -15,6 +15,7 @@ from typing import IO, Callable, Iterable, List, Optional, 
TypeVar from zoneinfo import ZoneInfo +import aiofiles.os as aos import humanize import structlog import tzlocal @@ -441,7 +442,11 @@ def min_date(): return datetime.datetime.min.replace(tzinfo=ZoneInfo("UTC")) -def has_recent_changes(entry: DirEntry, reference_time: float): +async def is_dir_no_symlink(p): + return await aos.path.isdir(p) and not await aos.path.islink(p) + + +async def has_recent_changes(path: str, reference_time: float) -> bool: # This is not efficient on a first look as we may stat things twice, but it # makes the recursion easier to read and the VFS will be caching this # anyway. @@ -449,18 +454,20 @@ def has_recent_changes(entry: DirEntry, reference_time: float): # higher levels will propagate changed mtimes do to new/deleted files # instead of just modified files in our case and looking at stats when # traversing a directory level is faster than going depth first. - if not entry.is_dir(follow_symlinks=False): - return False - if entry.stat(follow_symlinks=False).st_mtime >= reference_time: + st = await aos.stat(path, follow_symlinks=False) + if st.st_mtime >= reference_time: return True - candidates = list(os.scandir(entry.path)) + if not await is_dir_no_symlink(path): + return False + candidates = list(await aos.scandir(path)) # First pass: stat all direct entries for candidate in candidates: - if candidate.stat(follow_symlinks=False).st_mtime >= reference_time: + st = await aos.stat(candidate.path, follow_symlinks=False) + if st.st_mtime >= reference_time: return True # Second pass: start traversing for candidate in candidates: - if has_recent_changes(candidate, reference_time): + if await has_recent_changes(candidate.path, reference_time): return True return False From cf8664e9f86fd6e7833d7c9242240256e35ff27b Mon Sep 17 00:00:00 2001 From: Johann Bahl Date: Fri, 2 Feb 2024 00:11:48 +0100 Subject: [PATCH 06/13] status: display diff of changed tags --- src/backy/main.py | 21 ++++++++++++++++++--- src/backy/tests/test_main.py | 36 
++++++++++++++++++++++++++++-------- 2 files changed, 46 insertions(+), 11 deletions(-) diff --git a/src/backy/main.py b/src/backy/main.py index f95b4e7f..7cfc4659 100644 --- a/src/backy/main.py +++ b/src/backy/main.py @@ -62,6 +62,14 @@ def status(self, yaml_: bool, revision: str) -> None: else: duration = "-" + if r.pending_changes: + added = [f"+[on green]{t}[/]" for t in r.tags - r.orig_tags] + removed = [f"-[on red]{t}[/]" for t in r.orig_tags - r.tags] + same = list(r.orig_tags & r.tags) + tags = ",".join(added + removed + same) + else: + tags = ",".join(r.tags) + t.add_row( format_datetime_local(r.timestamp)[0], r.uuid, @@ -69,9 +77,11 @@ def status(self, yaml_: bool, revision: str) -> None: r.stats.get("bytes_written", 0), binary=True ), duration, - ",".join(r.tags), + tags, r.trust.value, - r.server, + f"[underline italic]{r.server}[/]" + if r.pending_changes + else r.server, ) rprint(t) @@ -81,6 +91,11 @@ def status(self, yaml_: bool, revision: str) -> None: len(revs), humanize.naturalsize(total_bytes, binary=True) ) ) + pending_changes = sum(1 for r in revs if r.pending_changes) + if pending_changes: + rprint( + f"[yellow]{pending_changes} pending change(s)[/] (Push changes with `backy push`)" + ) def backup(self, tags: str, force: bool) -> None: b = Backup(self.path, self.log) @@ -313,7 +328,7 @@ def setup_argparser(): choices=list(RestoreBackend), default=RestoreBackend.AUTO, dest="restore_backend", - help="(default: %(default)s)" + help="(default: %(default)s)", ) p.add_argument( "-r", diff --git a/src/backy/tests/test_main.py b/src/backy/tests/test_main.py index 7663012f..b6938c73 100644 --- a/src/backy/tests/test_main.py +++ b/src/backy/tests/test_main.py @@ -1,3 +1,4 @@ +import datetime import os import pprint import sys @@ -440,8 +441,20 @@ def test_commands_wrapper_status( ): commands = backy.main.Command(tmp_path, "AAAA", log) - revision = Revision.create(backup, set(), log, uuid="1") - revision.materialize() + revision1 = 
Revision.create(backup, {"daily"}, log, uuid="1") + revision1.materialize() + + revision2 = Revision.create(backup, {"daily"}, log, uuid="2") + revision2.timestamp = backy.utils.now() + datetime.timedelta(hours=1) + revision2.server = "remote" + revision2.orig_tags = {"daily"} + revision2.materialize() + + revision3 = Revision.create(backup, {"new", "same"}, log, uuid="3") + revision3.timestamp = backy.utils.now() + datetime.timedelta(hours=2) + revision3.server = "remote" + revision3.orig_tags = {"old", "same"} + revision3.materialize() commands.status(yaml_=False, revision="all") out, err = capsys.readouterr() @@ -449,12 +462,19 @@ def test_commands_wrapper_status( assert err == "" assert out == Ellipsis( """\ -┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━┳━━━━━━━━━┳━━━━━━━━━━┳━━━━━━┳━━━━━━━━━┳━━━━━━━━┓ -┃ Date (...) ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ Server ┃ -┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━╇━━━━━━━━━╇━━━━━━━━━━╇━━━━━━╇━━━━━━━━━╇━━━━━━━━┩ -│ ... │ 1 │ 0 Bytes │ - │ │ trusted │ │ -└──────────────────────┴────┴─────────┴──────────┴──────┴─────────┴────────┘ -1 revisions containing 0 Bytes data (estimated) +┏━━━━━━━━━━━━━━━━┳━━━━┳━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━┓ +┃ Date ┃ ┃ ┃ ┃ ┃ ┃ ┃ +┃ (Europe/Berli… ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ Server ┃ +┡━━━━━━━━━━━━━━━━╇━━━━╇━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━┩ +│ 2015-09-01 │ 1 │ 0 Bytes │ - │ daily │ trusted │ │ +│ 09:06:47 │ │ │ │ │ │ │ +│ 2015-09-01 │ 2 │ 0 Bytes │ - │ daily │ trusted │ remote │ +│ 10:06:47 │ │ │ │ │ │ │ +│ 2015-09-01 │ 3 │ 0 Bytes │ - │ +new,-old,same │ trusted │ remote │ +│ 11:06:47 │ │ │ │ │ │ │ +└────────────────┴────┴─────────┴──────────┴────────────────┴─────────┴────────┘ +3 revisions containing 0 Bytes data (estimated) +1 pending change(s) (Push changes with `backy push`) """ ) From 88b5440e46ca8578ecfa2f6caf2d3e040731baa2 Mon Sep 17 00:00:00 2001 From: Johann Bahl Date: Fri, 2 Feb 2024 18:10:18 +0100 Subject: [PATCH 07/13] allow selecting servers in rev 
spec --- ...20240205_012340_jb_issue_30_server_migration.rst | 3 +++ doc/man-backy.rst | 7 +++++++ src/backy/backup.py | 11 +++++++++-- src/backy/main.py | 4 ++-- src/backy/tests/test_archive.py | 13 +++++++++++++ 5 files changed, 34 insertions(+), 4 deletions(-) create mode 100644 changelog.d/20240205_012340_jb_issue_30_server_migration.rst diff --git a/changelog.d/20240205_012340_jb_issue_30_server_migration.rst b/changelog.d/20240205_012340_jb_issue_30_server_migration.rst new file mode 100644 index 00000000..0b698462 --- /dev/null +++ b/changelog.d/20240205_012340_jb_issue_30_server_migration.rst @@ -0,0 +1,3 @@ +.. A new scriv changelog fragment. + +- Add `server:` selector to revision spec diff --git a/doc/man-backy.rst b/doc/man-backy.rst index e58a45a6..3896ded0 100644 --- a/doc/man-backy.rst +++ b/doc/man-backy.rst @@ -170,6 +170,13 @@ Subcommand-specific options Trust state. Ordered by date, oldest first. * A tag with the **tag:** prefix. Selects all revisions with this tag. Ordered by date, oldest first. + * A server with the **server:** prefix: Selects all revisions located on + this server. The current server can be selected with an empty string. + Ordered by date, oldest first. + * The key word **local** selects all revisions located on the current + server (`server:`). + * The key word **remote** selects all revisions located on remote servers + (`not(server:)`). + * An inclusive range using two single revision specifiers separated with two dots. The singe revision specifiers may be omitted, in which case the **first** and/or **last** revision is assumed. 
diff --git a/src/backy/backup.py b/src/backy/backup.py index d92e220d..633dbe3d 100644 --- a/src/backy/backup.py +++ b/src/backy/backup.py @@ -309,7 +309,7 @@ def backup(self, tags: set[str], force: bool = False) -> None: except BackendException: self.log.exception("backend-error-distrust-all") verified = False - self.distrust("all", skip_lock=True) + self.distrust("local", skip_lock=True) if not verified: self.log.error( "verification-failed", @@ -601,7 +601,10 @@ def find_revisions( return [token] elif isinstance(token, list): return token - if token.startswith("tag:"): + if token.startswith("server:"): + server = token.removeprefix("server:") + return [r for r in self.history if server == r.server] + elif token.startswith("tag:"): tag = token.removeprefix("tag:") return [r for r in self.history if tag in r.tags] elif token.startswith("trust:"): @@ -611,6 +614,10 @@ def find_revisions( return self.history[:] elif token == "clean": return self.clean_history[:] + elif token == "local": + return self.find_revisions("server:") + elif token == "remote": + return self.find_revisions("not(server:)") else: return [self.find(token)] diff --git a/src/backy/main.py b/src/backy/main.py index 7cfc4659..9fba2c78 100644 --- a/src/backy/main.py +++ b/src/backy/main.py @@ -415,7 +415,7 @@ def setup_argparser(): "-r", "--revision", metavar="SPEC", - default="all", + default="local", help="use revision SPEC to distrust (default: %(default)s)", ) p.set_defaults(func="distrust") @@ -429,7 +429,7 @@ def setup_argparser(): "-r", "--revision", metavar="SPEC", - default="trust:distrusted", + default="trust:distrusted&local", help="use revision SPEC to verify (default: %(default)s)", ) p.set_defaults(func="verify") diff --git a/src/backy/tests/test_archive.py b/src/backy/tests/test_archive.py index 324deb3e..2b0d81c6 100644 --- a/src/backy/tests/test_archive.py +++ b/src/backy/tests/test_archive.py @@ -23,6 +23,7 @@ def backup_with_revisions(backup, tmp_path): timestamp: 2015-08-30 
01:00:00+00:00 parent: 123-0 stats: {bytes_written: 1486880, duration: 3.7} +server: remote2 tags: [daily, weekly] """ ) @@ -33,6 +34,7 @@ def backup_with_revisions(backup, tmp_path): timestamp: 2015-08-30 02:00:00+00:00 parent: 123-1 stats: {} +server: remote1 tags: [daily] """ ) @@ -123,6 +125,17 @@ def test_find_revisions(backup_with_revisions): assert a.find_revisions("( (first( (123-0, 123-1)) ))") == [ a.find("123-0"), ] + assert a.find_revisions("server:aaaa") == [] + assert a.find_revisions("server:remote1") == [ + a.find("123-2"), + ] + assert a.find_revisions("local") == [ + a.find("123-0"), + ] + assert a.find_revisions("remote") == [ + a.find("123-1"), + a.find("123-2"), + ] def test_find_revisions_should_raise_invalid_spec(backup_with_revisions): From 35e25bc193033f27b60cc6cecb296500fab11c18 Mon Sep 17 00:00:00 2001 From: Johann Bahl Date: Fri, 2 Feb 2024 17:51:43 +0100 Subject: [PATCH 08/13] warn on pending changes and prevent illegal remote rev modification --- src/backy/backup.py | 31 +++++++++++++++++++++++++++++-- src/backy/main.py | 3 +++ src/backy/revision.py | 2 ++ 3 files changed, 34 insertions(+), 2 deletions(-) diff --git a/src/backy/backup.py b/src/backy/backup.py index 633dbe3d..ab45fce2 100644 --- a/src/backy/backup.py +++ b/src/backy/backup.py @@ -221,6 +221,29 @@ def validate_tags(self, tags): ) raise RuntimeError("Unknown tags") + def warn_pending_changes(self, revs: Optional[List[Revision]] = None): + revs = revs if revs is not None else self.history + pending = [r for r in revs if r.pending_changes] + if pending: + self.log.warning( + "pending-changes", + _fmt_msg="Synchronize with remote server (backy push) or risk loosing changes", + revisions=",".join(r.uuid for r in pending), + ) + + def prevent_remote_rev(self, revs: Optional[List[Revision]] = None): + revs = revs if revs is not None else self.history + remote = [r for r in revs if r.server] + if remote: + self.log.error( + "remote-revs-disallowed", + _fmt_msg="Can not modify 
trust state of remote revisions locally.\n" + "Either include a filter to exclude them (local)\n" + "or edit them on the origin server and pull the changes (backy pull)", + revisions=",".join(r.uuid for r in remote), + ) + raise RuntimeError("Remote revs disallowed") + ################# # Making backups @@ -341,13 +364,17 @@ def backup(self, tags: set[str], force: bool = False) -> None: @locked(target=".backup", mode="exclusive") def distrust(self, revision: str) -> None: - for r in self.find_revisions(revision): + revs = self.find_revisions(revision) + self.prevent_remote_rev(revs) + for r in revs: r.distrust() r.write_info() @locked(target=".purge", mode="shared") def verify(self, revision: str) -> None: - for r in self.find_revisions(revision): + revs = self.find_revisions(revision) + self.prevent_remote_rev(revs) + for r in revs: r.backend.verify() @locked(target=".purge", mode="exclusive") diff --git a/src/backy/main.py b/src/backy/main.py index 9fba2c78..7f18f8ab 100644 --- a/src/backy/main.py +++ b/src/backy/main.py @@ -127,6 +127,7 @@ def find(self, revision: str, uuid: bool) -> None: def forget(self, revision: str) -> None: b = Backup(self.path, self.log) b.forget(revision) + b.warn_pending_changes() def scheduler(self, config: Path) -> None: backy.daemon.main(config, self.log) @@ -203,11 +204,13 @@ def tags( autoremove=autoremove, force=force, ) + b.warn_pending_changes() return int(not success) def expire(self) -> None: b = backy.backup.Backup(self.path, self.log) b.expire() + b.warn_pending_changes() def setup_argparser(): diff --git a/src/backy/revision.py b/src/backy/revision.py index 2ed90300..85b5abd2 100644 --- a/src/backy/revision.py +++ b/src/backy/revision.py @@ -143,10 +143,12 @@ def pending_changes(self): return self.server and self.tags != self.orig_tags def distrust(self) -> None: + assert not self.server self.log.info("distrusted") self.trust = Trust.DISTRUSTED def verify(self) -> None: + assert not self.server self.log.info("verified") 
self.trust = Trust.VERIFIED From 27ba17ab63ff31fc60f68e44a31d0c056b698a59 Mon Sep 17 00:00:00 2001 From: Johann Bahl Date: Fri, 2 Feb 2024 23:46:18 +0100 Subject: [PATCH 09/13] move push/pull metadata into a subprocess --- ...05_012140_jb_issue_30_server_migration.rst | 3 + src/backy/backup.py | 1 - src/backy/main.py | 42 +++++++++++- src/backy/scheduler.py | 66 ++++++++++++++++++- src/backy/tests/test_api.py | 2 +- src/backy/tests/test_daemon.py | 4 +- src/backy/tests/test_main.py | 52 ++++++++++++++- 7 files changed, 161 insertions(+), 9 deletions(-) create mode 100644 changelog.d/20240205_012140_jb_issue_30_server_migration.rst diff --git a/changelog.d/20240205_012140_jb_issue_30_server_migration.rst b/changelog.d/20240205_012140_jb_issue_30_server_migration.rst new file mode 100644 index 00000000..71ca15cc --- /dev/null +++ b/changelog.d/20240205_012140_jb_issue_30_server_migration.rst @@ -0,0 +1,3 @@ +.. A new scriv changelog fragment. + +- Add `push` and `pull` subcommand diff --git a/src/backy/backup.py b/src/backy/backup.py index ab45fce2..22c4456d 100644 --- a/src/backy/backup.py +++ b/src/backy/backup.py @@ -752,7 +752,6 @@ def find(self, spec: str) -> Revision: ################### # Syncing Revisions - # called by the scheduler without a subprocess @locked(target=".backup", mode="exclusive") async def push_metadata(self, peers, taskid: str): diff --git a/src/backy/main.py b/src/backy/main.py index 7f18f8ab..74830edf 100644 --- a/src/backy/main.py +++ b/src/backy/main.py @@ -5,7 +5,7 @@ import errno import sys from pathlib import Path -from typing import Optional +from typing import Literal, Optional import humanize import structlog @@ -212,6 +212,18 @@ def expire(self) -> None: b.expire() b.warn_pending_changes() + def push(self, config: Path): + d = backy.daemon.BackyDaemon(config, self.log) + d._read_config() + b = backy.backup.Backup(self.path, self.log) + asyncio.run(b.push_metadata(d.peers, self.taskid)) + + def pull(self, config: Path): + d = 
backy.daemon.BackyDaemon(config, self.log) + d._read_config() + b = backy.backup.Backup(self.path, self.log) + asyncio.run(b.pull_metadata(d.peers, self.taskid)) + def setup_argparser(): parser = argparse.ArgumentParser( @@ -494,6 +506,34 @@ def setup_argparser(): ) p.set_defaults(func="expire") + # PUSH + p = subparsers.add_parser( + "push", + help="Push pending changes to remote servers", + ) + p.add_argument( + "-c", + "--config", + type=Path, + default="/etc/backy.conf", + help="(default: %(default)s)", + ) + p.set_defaults(func="push") + + # PULL + p = subparsers.add_parser( + "pull", + help="Pull pending changes from remote servers", + ) + p.add_argument( + "-c", + "--config", + type=Path, + default="/etc/backy.conf", + help="(default: %(default)s)", + ) + p.set_defaults(func="pull") + return parser, client diff --git a/src/backy/scheduler.py b/src/backy/scheduler.py index 7fcd4a89..10974c2a 100644 --- a/src/backy/scheduler.py +++ b/src/backy/scheduler.py @@ -225,10 +225,72 @@ async def run_forever(self): self.update_status("finished") async def pull_metadata(self): - await self.backup.pull_metadata(self.daemon.peers, self.taskid) + self.log.info("pull-metadata-started") + proc = await asyncio.create_subprocess_exec( + BACKY_CMD, + "-t", + self.taskid, + "-b", + self.path, + "-l", + self.logfile, + "pull", + "-c", + self.daemon.config_file, + close_fds=True, + start_new_session=True, # Avoid signal propagation like Ctrl-C + stdin=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + try: + return_code = await proc.wait() + self.log.info( + "pull-metadata-finished", + return_code=return_code, + subprocess_pid=proc.pid, + ) + except asyncio.CancelledError: + self.log.warning("pull-metadata-cancelled") + try: + proc.terminate() + except ProcessLookupError: + pass + raise async def push_metadata(self): - await self.backup.push_metadata(self.daemon.peers, self.taskid) + self.log.info("push-metadata-started") + proc = await 
asyncio.create_subprocess_exec( + BACKY_CMD, + "-t", + self.taskid, + "-b", + self.path, + "-l", + self.logfile, + "push", + "-c", + self.daemon.config_file, + close_fds=True, + start_new_session=True, # Avoid signal propagation like Ctrl-C + stdin=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + try: + return_code = await proc.wait() + self.log.info( + "push-metadata-finished", + return_code=return_code, + subprocess_pid=proc.pid, + ) + except asyncio.CancelledError: + self.log.warning("push-metadata-cancelled") + try: + proc.terminate() + except ProcessLookupError: + pass + raise async def run_backup(self, tags): self.log.info("backup-started", tags=", ".join(tags)) diff --git a/src/backy/tests/test_api.py b/src/backy/tests/test_api.py index 7c61b847..c119b0c0 100644 --- a/src/backy/tests/test_api.py +++ b/src/backy/tests/test_api.py @@ -270,7 +270,7 @@ async def test_split_brain(daemons, log): revs[3].uuid, ] - bs[1].history[0].tags.add("manual:new1") + bs[1].tags("add", bs[1].history[0].uuid, {"manual:new1"}) bs[3].history[0].remove() await js[1].push_metadata() diff --git a/src/backy/tests/test_daemon.py b/src/backy/tests/test_daemon.py index 9a6934ae..32e1a98a 100644 --- a/src/backy/tests/test_daemon.py +++ b/src/backy/tests/test_daemon.py @@ -321,6 +321,8 @@ async def failing_coroutine(*args, **kw): monkeypatch.setattr(job, "run_purge", null_coroutine) monkeypatch.setattr(job, "run_callback", null_coroutine) monkeypatch.setattr(job, "run_backup", failing_coroutine) + monkeypatch.setattr(job, "pull_metadata", failing_coroutine) + monkeypatch.setattr(job, "push_metadata", failing_coroutine) # This patch causes a single run through the generator loop. def update_status(status): @@ -375,8 +377,6 @@ async def wait_for_job_finished(): ... W test01[...] job/backoff backoff=480 ... D test01[...] quarantine/scan entries=0 ... I test01[...] job/waiting next_tags='daily' next_time='2015-09-01 09:14:47' -... I test01[...] 
backup/pull-start \n\ -... I test01[...] backup/push-start \n\ ... I test01[...] job/stop \n\ ... D test01[...] quarantine/scan entries=0 ... I test01[...] job/waiting next_tags='daily' next_time='2015-09-02 07:32:51' diff --git a/src/backy/tests/test_main.py b/src/backy/tests/test_main.py index b6938c73..32710b19 100644 --- a/src/backy/tests/test_main.py +++ b/src/backy/tests/test_main.py @@ -31,7 +31,7 @@ def test_display_usage(capsys, argv): """\ usage: pytest [-h] [-v] [-l LOGFILE] [-b BACKUPDIR] [-t TASKID] {client,backup,restore,purge,find,status,\ -upgrade,scheduler,distrust,verify,forget,tags,expire} +upgrade,scheduler,distrust,verify,forget,tags,expire,push,pull} ... """ == out @@ -66,7 +66,7 @@ def test_display_help(capsys, argv): """\ usage: pytest [-h] [-v] [-l LOGFILE] [-b BACKUPDIR] [-t TASKID] {client,backup,restore,purge,find,status,\ -upgrade,scheduler,distrust,verify,forget,tags,expire} +upgrade,scheduler,distrust,verify,forget,tags,expire,push,pull} ... Backup and restore for block devices. @@ -399,6 +399,54 @@ def test_call_expire(capsys, backup, argv, monkeypatch): assert exit.value.code == 0 +@pytest.mark.parametrize("action", ["pull", "push"]) +def test_call_pull_push(capsys, backup, argv, monkeypatch, tmp_path, action): + monkeypatch.setattr(backy.main.Command, action, print_args) + conf = tmp_path / "conf" + with open(conf, "w") as c: + c.write( + f"""\ +global: + base-dir: {str(tmp_path)} +api: + addrs: "127.0.0.1, ::1" + port: 1234 + cli-default: + token: "test" +peers : {{}} +schedules: {{}} +jobs: {{}} +""" + ) + + argv.extend(["-v", "-b", str(backup.path), action, "-c", str(conf)]) + utils.log_data = "" + with pytest.raises(SystemExit) as exit: + backy.main.main() + assert exit.value.code == 0 + out, err = capsys.readouterr() + assert ( + Ellipsis( + f"""\ +(,) +{{'config': {repr(conf)}}} +""" + ) + == out + ) + assert ( + Ellipsis( + f"""\ +... D command/invoked args='... -v -b {backup.path} {action} -c {conf}' +... 
D command/parsed func='{action}' func_args={{'config': {repr(conf)}}} +... D command/successful \n\ +""" + ) + == utils.log_data + ) + assert exit.value.code == 0 + + def test_call_unexpected_exception( capsys, backup, argv, monkeypatch, log, tmp_path ): From 6a7957d447a726210f5fce59044d72e96df4a9f7 Mon Sep 17 00:00:00 2001 From: Johann Bahl Date: Sat, 3 Feb 2024 22:38:50 +0100 Subject: [PATCH 10/13] split history into local/remote --- src/backy/api.py | 8 ++-- src/backy/backends/chunked/__init__.py | 4 +- .../backends/chunked/tests/test_backend.py | 3 ++ src/backy/backup.py | 42 ++++++++++++++----- src/backy/daemon.py | 23 +++++----- src/backy/revision.py | 2 + src/backy/sources/ceph/source.py | 6 +-- src/backy/tests/test_archive.py | 20 ++++++--- 8 files changed, 71 insertions(+), 37 deletions(-) diff --git a/src/backy/api.py b/src/backy/api.py index b3a281c2..b6a7e673 100644 --- a/src/backy/api.py +++ b/src/backy/api.py @@ -210,11 +210,9 @@ async def touch_backup(self, request: web.Request): async def get_revs(self, request: web.Request) -> List[Revision]: backup = await self.get_backup(request) request["log"].info("get-revs", name=backup.name) - if request.query.get("only_clean", "") == "1": - revs = backup.clean_history - else: - revs = backup.history - return [r for r in revs if not r.server] + return backup.get_history( + local=True, clean=request.query.get("only_clean", "") == "1" + ) async def put_tags(self, request: web.Request): json = await request.json() diff --git a/src/backy/backends/chunked/__init__.py b/src/backy/backends/chunked/__init__.py index 90b802f0..0fbf74c5 100644 --- a/src/backy/backends/chunked/__init__.py +++ b/src/backy/backends/chunked/__init__.py @@ -50,7 +50,7 @@ def open(self, mode: str = "rb", parent: Optional[Revision] = None) -> File: # def purge(self) -> None: self.log.debug("purge") used_chunks: Set[Hash] = set() - for revision in self.backup.history: + for revision in self.backup.local_history: if revision.backend_type != 
"chunked": continue used_chunks.update( @@ -65,7 +65,7 @@ def verify(self): verified_chunks: Set[Hash] = set() # Load verified chunks to avoid duplicate work - for revision in self.backup.clean_history: + for revision in self.backup.get_history(clean=True, local=True): if ( revision.trust != Trust.VERIFIED or revision.backend_type != "chunked" diff --git a/src/backy/backends/chunked/tests/test_backend.py b/src/backy/backends/chunked/tests/test_backend.py index c0fd1649..c93c2362 100644 --- a/src/backy/backends/chunked/tests/test_backend.py +++ b/src/backy/backends/chunked/tests/test_backend.py @@ -37,6 +37,9 @@ def test_purge(simple_file_config, log): f.write(b"asdf") f.close() r.materialize() + remote = Revision(b, log) # remote revision without local data + remote.server = "remote" + remote.materialize() b.scan() # Reassign as the scan will create a new reference r = b.history[0] diff --git a/src/backy/backup.py b/src/backy/backup.py index 22c4456d..8a0679f0 100644 --- a/src/backy/backup.py +++ b/src/backy/backup.py @@ -156,13 +156,13 @@ def __init__(self, path: Path, log: BoundLogger): # Initialize our backend self.default_backend_type = self.config["source"].get("backend", None) if self.default_backend_type is None: - if not self.history: + if not self.local_history: # Start fresh backups with our new default. self.default_backend_type = "chunked" else: # Choose to continue existing backups with whatever format # they are in. 
- self.default_backend_type = self.history[-1].backend_type + self.default_backend_type = self.local_history[-1].backend_type self.schedule = Schedule() self.schedule.configure(self.config["schedule"]) @@ -199,14 +199,34 @@ def set_purge_pending(self): def clear_purge_pending(self): self.path.joinpath(".purge_pending").unlink(missing_ok=True) + def get_history( + self, *, clean: bool = False, local: bool = False + ) -> list[Revision]: + return [ + rev + for rev in self.history + if (not clean or "duration" in rev.stats) + and (not local or not rev.server) + ] + @property def clean_history(self) -> List[Revision]: """History without incomplete revisions.""" - return [rev for rev in self.history if "duration" in rev.stats] + return self.get_history(clean=True) + + @property + def local_history(self): + """History without incomplete revisions.""" + return self.get_history(local=True) @property def contains_distrusted(self) -> bool: - return any((r == Trust.DISTRUSTED for r in self.clean_history)) + return any( + ( + r == Trust.DISTRUSTED + for r in self.get_history(clean=True, local=True) + ) + ) def validate_tags(self, tags): missing_tags = ( @@ -250,7 +270,7 @@ def prevent_remote_rev(self, revs: Optional[List[Revision]] = None): @locked(target=".backup", mode="exclusive") def _clean(self) -> None: """Clean-up incomplete revisions.""" - for revision in self.history: + for revision in self.local_history: if "duration" not in revision.stats: self.log.warning( "clean-incomplete", revision_uuid=revision.uuid @@ -356,7 +376,7 @@ def backup(self, tags: set[str], force: bool = False) -> None: # moving along automatically. This could also be moved into the # scheduler. 
self.scan() - for revision in reversed(self.clean_history): + for revision in reversed(self.get_history(clean=True, local=True)): if revision.trust == Trust.DISTRUSTED: self.log.warning("inconsistent") revision.backend.verify() @@ -379,7 +399,7 @@ def verify(self, revision: str) -> None: @locked(target=".purge", mode="exclusive") def purge(self) -> None: - self.history[-1].backend.purge() + self.local_history[-1].backend.purge() self.clear_purge_pending() ################# @@ -493,7 +513,9 @@ def upgrade(self) -> None: while True: self.scan() to_upgrade: List[Revision] = [ - r for r in self.clean_history if r.backend_type == "cowfile" + r + for r in self.get_history(clean=True, local=True) + if r.backend_type == "cowfile" ] if not to_upgrade: break @@ -640,7 +662,7 @@ def find_revisions( elif token == "all": return self.history[:] elif token == "clean": - return self.clean_history[:] + return self.clean_history elif token == "local": return self.find_revisions("server:") elif token == "remote": @@ -756,7 +778,7 @@ def find(self, spec: str) -> Revision: @locked(target=".backup", mode="exclusive") async def push_metadata(self, peers, taskid: str): grouped = defaultdict(list) - for r in self.history: + for r in self.clean_history: if r.pending_changes: grouped[r.server].append(r) self.log.info( diff --git a/src/backy/daemon.py b/src/backy/daemon.py index c8c5ef79..a18951eb 100644 --- a/src/backy/daemon.py +++ b/src/backy/daemon.py @@ -266,28 +266,27 @@ def status(self, filter_re: Optional[Pattern[str]] = None) -> List[dict]: job.backup.scan() manual_tags = set() unsynced_revs = 0 - if job.backup.clean_history: - last = job.backup.clean_history[-1] - for rev in job.backup.clean_history: - manual_tags |= filter_manual_tags(rev.tags) - if rev.pending_changes: - unsynced_revs += 1 - else: - last = None + history = job.backup.clean_history + for rev in history: + manual_tags |= filter_manual_tags(rev.tags) + if rev.pending_changes: + unsynced_revs += 1 result.append( dict( 
job=job.name, sla="OK" if job.sla else "TOO OLD", sla_overdue=job.sla_overdue, status=job.status, - last_time=last.timestamp if last else None, + last_time=history[-1].timestamp if history else None, last_tags=( - ",".join(job.schedule.sorted_tags(last.tags)) - if last + ",".join(job.schedule.sorted_tags(history[-1].tags)) + if history else None ), last_duration=( - last.stats.get("duration", 0) if last else None + history[-1].stats.get("duration", 0) + if history + else None ), next_time=job.next_time, next_tags=( diff --git a/src/backy/revision.py b/src/backy/revision.py index 85b5abd2..9f67fbdf 100644 --- a/src/backy/revision.py +++ b/src/backy/revision.py @@ -187,6 +187,8 @@ def get_parent(self, ignore_trust=False) -> Optional["Revision"]: continue if not ignore_trust and r.trust == Trust.DISTRUSTED: continue + if r.server != self.server: + continue if r.uuid == self.uuid: break prev = r diff --git a/src/backy/sources/ceph/source.py b/src/backy/sources/ceph/source.py index 82145138..8d6623fc 100644 --- a/src/backy/sources/ceph/source.py +++ b/src/backy/sources/ceph/source.py @@ -149,9 +149,9 @@ def _delete_old_snapshots(self) -> None: # revision - which is wrong: broken new revisions would always cause # full backups instead of new deltas based on the most recent valid # one. 
- if not self.always_full and self.revision.backup.history: - keep_snapshot_revision = self.revision.backup.history[-1] - keep_snapshot_revision = keep_snapshot_revision.uuid + # XXX this will break if multiple servers are active + if not self.always_full and self.revision.backup.local_history: + keep_snapshot_revision = self.revision.backup.local_history[-1].uuid else: keep_snapshot_revision = None for snapshot in self.rbd.snap_ls(self._image_name): diff --git a/src/backy/tests/test_archive.py b/src/backy/tests/test_archive.py index 2b0d81c6..232d4e58 100644 --- a/src/backy/tests/test_archive.py +++ b/src/backy/tests/test_archive.py @@ -23,7 +23,7 @@ def backup_with_revisions(backup, tmp_path): timestamp: 2015-08-30 01:00:00+00:00 parent: 123-0 stats: {bytes_written: 1486880, duration: 3.7} -server: remote2 +server: remote1 tags: [daily, weekly] """ ) @@ -58,10 +58,9 @@ def test_find_revision_empty(backup): def test_load_revisions(backup_with_revisions): a = backup_with_revisions assert [x.uuid for x in a.history] == ["123-0", "123-1", "123-2"] - assert a.history[1].uuid == "123-1" - assert a.history[1].get_parent().uuid == "123-0" - assert a.history[2].get_parent().uuid == "123-1" assert a.history[0].get_parent() is None + assert a.history[1].get_parent() is None + assert a.history[2].get_parent().uuid == "123-1" def test_find_revisions(backup_with_revisions): @@ -127,6 +126,7 @@ def test_find_revisions(backup_with_revisions): ] assert a.find_revisions("server:aaaa") == [] assert a.find_revisions("server:remote1") == [ + a.find("123-1"), a.find("123-2"), ] assert a.find_revisions("local") == [ @@ -173,8 +173,18 @@ def test_find_revision(backup_with_revisions): assert a.find(" first( tag:monthly ) ").uuid == "123-0" -def test_clean_history_should_exclude_incomplete_revs(backup_with_revisions): +def test_get_history(backup_with_revisions): assert 2 == len(backup_with_revisions.clean_history) + assert ( + backup_with_revisions.clean_history + == 
backup_with_revisions.get_history(clean=True) + ) + assert 1 == len(backup_with_revisions.local_history) + assert ( + backup_with_revisions.local_history + == backup_with_revisions.get_history(local=True) + ) + assert 1 == len(backup_with_revisions.get_history(clean=True, local=True)) def test_ignore_duplicates(backup_with_revisions, tmp_path): From b4e4b8b5d343a4365e85bdb61a3c16cb6ddbe7eb Mon Sep 17 00:00:00 2001 From: Johann Bahl Date: Mon, 12 Feb 2024 14:11:52 +0100 Subject: [PATCH 11/13] friendlier error messages --- lib.nix | 2 +- src/backy/api.py | 2 +- src/backy/backup.py | 68 ++++++++++++++++++++++-------------- src/backy/main.py | 53 ++++++++++++++++++++++------ src/backy/tests/test_api.py | 18 ++++++++++ src/backy/tests/test_main.py | 4 +-- 6 files changed, 107 insertions(+), 40 deletions(-) diff --git a/lib.nix b/lib.nix index 56204c2a..27c994af 100644 --- a/lib.nix +++ b/lib.nix @@ -121,7 +121,7 @@ in devShells = { default = mkShellNoCC { - BACKY_CMD = "backy"; + BACKY_CMD = "${poetryEnv}/bin/backy"; packages = [ poetryEnv poetry diff --git a/src/backy/api.py b/src/backy/api.py index b6a7e673..159c332f 100644 --- a/src/backy/api.py +++ b/src/backy/api.py @@ -88,7 +88,7 @@ async def reconfigure( ) await site.start() self.log.info("added-site", site=site.name) - for bind_addr, site in self.sites.items(): + for bind_addr, site in list(self.sites.items()): if bind_addr in bind_addrs: continue await site.stop() diff --git a/src/backy/backup.py b/src/backy/backup.py index 8a0679f0..f7283037 100644 --- a/src/backy/backup.py +++ b/src/backy/backup.py @@ -776,7 +776,7 @@ def find(self, spec: str) -> Revision: # Syncing Revisions @locked(target=".backup", mode="exclusive") - async def push_metadata(self, peers, taskid: str): + async def push_metadata(self, peers, taskid: str) -> int: grouped = defaultdict(list) for r in self.clean_history: if r.pending_changes: @@ -785,16 +785,20 @@ async def push_metadata(self, peers, taskid: str): "push-start", 
changes=sum(len(l) for l in grouped.values()) ) async with APIClientManager(peers, taskid, self.log) as apis: - await asyncio.gather( + errors = await asyncio.gather( *[ self._push_metadata(apis[server], grouped[server]) for server in apis ] ) - self.log.info("push-end") + self.log.info("push-end", errors=sum(errors)) + return sum(errors) - async def _push_metadata(self, api: APIClient, revs: List[Revision]): + async def _push_metadata( + self, api: APIClient, revs: List[Revision] + ) -> bool: purge_required = False + error = False for r in revs: log = self.log.bind( server=r.server, @@ -815,10 +819,13 @@ async def _push_metadata(self, api: APIClient, revs: List[Revision]): purge_required = True except ClientResponseError: log.warning("push-client-error", exc_style="short") + error = True except ClientConnectionError: - log.info("push-connection-error", exc_style="short") + log.warning("push-connection-error", exc_style="short") + error = True except ClientError: - log.warning("push-error", exc_info=True) + log.exception("push-error") + error = True if purge_required: log = self.log.bind(server=api.server_name) @@ -827,13 +834,17 @@ async def _push_metadata(self, api: APIClient, revs: List[Revision]): await api.run_purge(self.name) except ClientResponseError: log.warning("push-purge-client-error", exc_style="short") + error = True except ClientConnectionError: - log.info("push-purge-connection-error", exc_style="short") + log.warning("push-purge-connection-error", exc_style="short") + error = True except ClientError: - log.warning("push-purge-error", exc_info=True) + log.error("push-purge-error") + error = True + return error @locked(target=".backup", mode="exclusive") - async def pull_metadata(self, peers: dict, taskid: str): + async def pull_metadata(self, peers: dict, taskid: str) -> int: async def remove_dead_peer(): for r in list(self.history): if r.server and r.server not in peers: @@ -843,21 +854,24 @@ async def remove_dead_peer(): server=r.server, ) 
r.remove(force=True) + return False self.log.info("pull-start") async with APIClientManager(peers, taskid, self.log) as apis: - await asyncio.gather( + errors = await asyncio.gather( remove_dead_peer(), *[self._pull_metadata(apis[server]) for server in apis], ) - self.log.info("pull-end") + self.log.info("pull-end", errors=sum(errors)) + return sum(errors) - async def _pull_metadata(self, api: APIClient): + async def _pull_metadata(self, api: APIClient) -> bool: + error = False log = self.log.bind(server=api.server_name) try: await api.touch_backup(self.name) remote_revs = await api.get_revs(self) - + log.debug("pull-found-revs", revs=len(remote_revs)) except ClientResponseError as e: if e.status in [ HTTPNotFound.status_code, @@ -866,29 +880,31 @@ async def _pull_metadata(self, api: APIClient): log.debug("pull-not-found") else: log.warning("pull-client-error", exc_style="short") + error = True remote_revs = [] except ClientConnectionError: - log.info("pull-connection-error", exc_style="short") - return + log.warning("pull-connection-error", exc_style="short") + return True except ClientError: - log.warning("pull-error", exc_info=True) + log.exception("pull-error") + error = True remote_revs = [] - log.debug( - "pull-found-revs", - revs=len(remote_revs), - ) - matching_uuids = { + local_uuids = { r.uuid for r in self.history if r.server == api.server_name } remote_uuids = {r.uuid for r in remote_revs} - for uuid in matching_uuids - remote_uuids: + for uuid in local_uuids - remote_uuids: log.warning("pull-removing-unknown-rev", rev_uuid=uuid) self.find_by_uuid(uuid).remove(force=True) for r in remote_revs: + if r.uuid in local_uuids: + if r.to_dict() == self.find_by_uuid(r.uuid).to_dict(): + continue + log.debug("pull-updating-rev", rev_uid=r.uuid) + else: + log.debug("pull-new-rev", rev_uid=r.uuid) r.write_info() - log.debug( - "pull-updated-rev", - rev_uid=r.uuid, - ) + + return error diff --git a/src/backy/main.py b/src/backy/main.py index 74830edf..0d33fe9d 
100644 --- a/src/backy/main.py +++ b/src/backy/main.py @@ -156,30 +156,58 @@ def client( token: str, apifunc: str, **kwargs, - ) -> None: - async def run(): + ) -> int: + async def run() -> int: + if peer and (url or token): + self.log.error( + "client-argparse-error", + _fmt_msg="--peer conflicts with --url and --token", + ) + return 1 + if bool(url) ^ bool(token): + self.log.error( + "client-argparse-error", + _fmt_msg="--url and --token require each other", + ) + return 1 if url and token: api = APIClient("", url, token, self.taskid, self.log) else: d = backy.daemon.BackyDaemon(config, self.log) d._read_config() if peer: + if peer not in d.peers: + self.log.error( + "client-peer-unknown", + _fmt_msg="The peer {peer} is not known. Select a known peer or specify --url and --token.\n" + "The following peers are known: {known}", + peer=peer, + known=", ".join(d.peers.keys()), + ) + return 1 api = APIClient.from_conf( peer, d.peers[peer], self.taskid, self.log ) else: + if "token" not in d.api_cli_default: + self.log.error( + "client-missing-defaults", + _fmt_msg="The config file is missing default parameters. 
Please specify --url and --token", + ) + return 1 api = APIClient.from_conf( "", d.api_cli_default, self.taskid, self.log ) async with CLIClient(api, self.log) as c: try: await getattr(c, apifunc)(**kwargs) - except ClientConnectionError as e: + except ClientConnectionError: c.log.error("connection-error", exc_style="banner") c.log.debug("connection-error", exc_info=True) - sys.exit(1) + return 1 + return 0 - asyncio.run(run()) + return asyncio.run(run()) def tags( self, @@ -212,17 +240,19 @@ def expire(self) -> None: b.expire() b.warn_pending_changes() - def push(self, config: Path): + def push(self, config: Path) -> int: d = backy.daemon.BackyDaemon(config, self.log) d._read_config() b = backy.backup.Backup(self.path, self.log) - asyncio.run(b.push_metadata(d.peers, self.taskid)) + errors = asyncio.run(b.push_metadata(d.peers, self.taskid)) + return int(bool(errors)) - def pull(self, config: Path): + def pull(self, config: Path) -> int: d = backy.daemon.BackyDaemon(config, self.log) d._read_config() b = backy.backup.Backup(self.path, self.log) - asyncio.run(b.pull_metadata(d.peers, self.taskid)) + errors = asyncio.run(b.pull_metadata(d.peers, self.taskid)) + return int(bool(errors)) def setup_argparser(): @@ -587,7 +617,10 @@ def main(): try: log.debug("parsed", func=args.func, func_args=func_args) - func(**func_args) + ret = func(**func_args) + if isinstance(ret, int): + log.debug("return-code", code=ret) + sys.exit(ret) log.debug("successful") sys.exit(0) except Exception: diff --git a/src/backy/tests/test_api.py b/src/backy/tests/test_api.py index c119b0c0..87fa9750 100644 --- a/src/backy/tests/test_api.py +++ b/src/backy/tests/test_api.py @@ -208,6 +208,24 @@ async def test_simple_sync(daemons, log): assert new_rev1.trust == rev1.trust assert new_rev1.server == "server-1" + rev1.distrust() + rev1.tags = {"manual:new"} + rev1.write_info() + + await j0.pull_metadata() + b0.scan() + + assert [r.uuid for r in b0.history] == [rev0.uuid, rev1.uuid] + new_rev1 = 
b0.history[1] + assert new_rev1.backup == b0 + assert new_rev1.timestamp == rev1.timestamp + assert new_rev1.backend_type == "" + assert new_rev1.stats == rev1.stats + assert new_rev1.tags == rev1.tags + assert new_rev1.orig_tags == rev1.tags + assert new_rev1.trust == rev1.trust + assert new_rev1.server == "server-1" + new_rev1.remove() assert [r.uuid for r in b0.history] == [rev0.uuid, rev1.uuid] assert new_rev1.tags == set() diff --git a/src/backy/tests/test_main.py b/src/backy/tests/test_main.py index 32710b19..aec7da2b 100644 --- a/src/backy/tests/test_main.py +++ b/src/backy/tests/test_main.py @@ -283,7 +283,7 @@ def test_call_client( ... D command/parsed func='client' func_args={{'config': PosixPath('...'), 'peer': None, \ 'url': None, 'token': None{", "*bool(args)}{str(args)[1:-1]}, 'apifunc': '{action}'}} ... D daemon/read-config ... -... D command/successful \n\ +... D command/return-code code=0 """ ) == utils.log_data @@ -474,7 +474,7 @@ def do_raise(*args, **kw): ... E command/failed exception_class='builtins.RuntimeError' exception_msg='test' exception>\tTraceback (most recent call last): exception>\t File ".../src/backy/main.py", line ..., in main -exception>\t func(**func_args) +exception>\t ret = func(**func_args) exception>\t File ".../src/backy/tests/test_main.py", line ..., in do_raise exception>\t raise RuntimeError("test") exception>\tRuntimeError: test From 146cb5d8b949031450550c451d91defebec43b01 Mon Sep 17 00:00:00 2001 From: Johann Bahl Date: Tue, 2 Apr 2024 12:47:59 +0200 Subject: [PATCH 12/13] Coordinate backups for the same job between servers The server with the largest number of local revisions is the leader. If the leader is offline another server will take over. This assumes that all servers share the same rng and thus schedule the backup at the same time. If this is not the case more backups than necessary may be created (but not less). 
--- .pre-commit-config.yaml | 2 +- ...02_125207_jb_issue_30_server_migration.rst | 3 + src/backy/api.py | 51 +- src/backy/client.py | 19 +- src/backy/conftest.py | 6 + src/backy/daemon.py | 53 +- src/backy/scheduler.py | 160 +++++-- src/backy/tests/test_api.py | 453 +++++++++++++++++- src/backy/tests/test_daemon.py | 20 +- src/backy/tests/test_scheduler.py | 2 +- src/backy/utils.py | 17 +- 11 files changed, 685 insertions(+), 101 deletions(-) create mode 100644 changelog.d/20240402_125207_jb_issue_30_server_migration.rst diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b093fc4c..93a9bdce 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -32,7 +32,7 @@ repos: - types-PyYAML==5.4.0 - types-setuptools - types-tzlocal==4.2 - - types-aiofiles==23.2 + - types-aiofiles==23.2.0.20240311 exclude: tests args: - --check-untyped-defs diff --git a/changelog.d/20240402_125207_jb_issue_30_server_migration.rst b/changelog.d/20240402_125207_jb_issue_30_server_migration.rst new file mode 100644 index 00000000..0c165fb7 --- /dev/null +++ b/changelog.d/20240402_125207_jb_issue_30_server_migration.rst @@ -0,0 +1,3 @@ +.. A new scriv changelog fragment. 
+ +- Coordinate backups for the same job between servers diff --git a/src/backy/api.py b/src/backy/api.py index 159c332f..d59ee6ff 100644 --- a/src/backy/api.py +++ b/src/backy/api.py @@ -2,7 +2,7 @@ import re from json import JSONEncoder from pathlib import Path -from typing import Any, List, Tuple +from typing import TYPE_CHECKING, Any, List, Tuple from aiohttp import hdrs, web from aiohttp.web_exceptions import ( @@ -18,12 +18,14 @@ from aiohttp.web_runner import AppRunner, TCPSite from structlog.stdlib import BoundLogger -import backy.daemon from backy.backup import Backup from backy.revision import Revision from backy.scheduler import Job from backy.utils import generate_taskid +if TYPE_CHECKING: + from backy.daemon import BackyDaemon + class BackyJSONEncoder(JSONEncoder): def default(self, o: Any) -> Any: @@ -36,7 +38,7 @@ def default(self, o: Any) -> Any: class BackyAPI: - daemon: "backy.daemon.BackyDaemon" + daemon: "BackyDaemon" sites: dict[Tuple[str, int], TCPSite] runner: AppRunner tokens: dict @@ -144,7 +146,9 @@ async def to_json(self, request: web.Request, handler): else: return web.json_response(resp, dumps=BackyJSONEncoder().encode) - async def get_status(self, request: web.Request) -> List[dict]: + async def get_status( + self, request: web.Request + ) -> List["BackyDaemon.StatusDict"]: filter = request.query.get("filter", None) request["log"].info("get-status", filter=filter) if filter: @@ -160,7 +164,7 @@ async def get_jobs(self, request: web.Request) -> List[Job]: return list(self.daemon.jobs.values()) async def get_job(self, request: web.Request) -> Job: - name = request.match_info.get("job_name", None) + name = request.match_info.get("job_name") request["log"].info("get-job", name=name) try: return self.daemon.jobs[name] @@ -176,40 +180,38 @@ async def run_job(self, request: web.Request): async def list_backups(self, request: web.Request) -> List[str]: request["log"].info("list-backups") - return await self.daemon.find_dead_backups() + return 
list(self.daemon.dead_backups.keys()) - async def get_backup(self, request: web.Request) -> Backup: - name = request.match_info.get("backup_name", None) + async def get_backup( + self, request: web.Request, allow_active: bool + ) -> Backup: + name = request.match_info.get("backup_name") request["log"].info("get-backups", name=name) + if name in self.daemon.dead_backups: + return self.daemon.dead_backups[name] if name in self.daemon.jobs: + if allow_active: + return self.daemon.jobs[name].backup request["log"].info("get-backups-forbidden", name=name) raise HTTPForbidden() - try: - path = Path(self.daemon.base_dir).joinpath(name).resolve() - if ( - not path.exists() - or Path(self.daemon.base_dir).resolve() not in path.parents - ): - raise FileNotFoundError - return Backup(path, request["log"]) - except FileNotFoundError: - request["log"].info("get-backups-not-found", name=name) - raise HTTPNotFound() + request["log"].info("get-backups-not-found", name=name) + raise HTTPNotFound() async def run_purge(self, request: web.Request): - backup = await self.get_backup(request) + backup = await self.get_backup(request, False) request["log"].info("run-purge", name=backup.name) backup.set_purge_pending() raise HTTPAccepted() async def touch_backup(self, request: web.Request): - backup = await self.get_backup(request) + backup = await self.get_backup(request, True) request["log"].info("touch-backup", name=backup.name) backup.touch() async def get_revs(self, request: web.Request) -> List[Revision]: - backup = await self.get_backup(request) + backup = await self.get_backup(request, True) request["log"].info("get-revs", name=backup.name) + backup.scan() return backup.get_history( local=True, clean=request.query.get("only_clean", "") == "1" ) @@ -223,8 +225,8 @@ async def put_tags(self, request: web.Request): request["log"].info("put-tags-bad-request") raise HTTPBadRequest() autoremove = request.query.get("autoremove", "") == "1" - spec = request.match_info.get("rev_spec", None) - 
backup = await self.get_backup(request) + spec = request.match_info.get("rev_spec") + backup = await self.get_backup(request, False) request["log"].info( "put-tags", name=backup.name, @@ -233,6 +235,7 @@ async def put_tags(self, request: web.Request): spec=spec, autoremove=autoremove, ) + backup.scan() try: if not backup.tags( "set", diff --git a/src/backy/client.py b/src/backy/client.py index 7ce69963..c16e9a68 100644 --- a/src/backy/client.py +++ b/src/backy/client.py @@ -2,7 +2,7 @@ import re import sys from asyncio import get_running_loop -from typing import Dict, List +from typing import TYPE_CHECKING, Dict, Iterator, List import aiohttp import humanize @@ -16,32 +16,35 @@ from backy.revision import Revision from backy.utils import format_datetime_local +if TYPE_CHECKING: + from backy.daemon import BackyDaemon + class APIClientManager: connector: TCPConnector - peers: dict + peers: dict[str, dict] clients: dict[str, "APIClient"] taskid: str log: BoundLogger - def __init__(self, peers, taskid, log): + def __init__(self, peers: Dict[str, dict], taskid: str, log: BoundLogger): self.connector = TCPConnector() self.peers = peers self.clients = dict() self.taskid = taskid self.log = log.bind(subsystem="APIClientManager") - def __getitem__(self, name): + def __getitem__(self, name: str) -> "APIClient": if name and name not in self.clients: self.clients[name] = APIClient.from_conf( name, self.peers[name], self.taskid, self.log, self.connector ) return self.clients[name] - def __iter__(self): + def __iter__(self) -> Iterator[str]: return iter(self.peers) - async def close(self): + async def close(self) -> None: for c in self.clients.values(): await c.close() await self.connector.close() @@ -89,7 +92,9 @@ def from_conf(cls, server_name, conf, *args, **kwargs): **kwargs, ) - async def fetch_status(self, filter: str = ""): + async def fetch_status( + self, filter: str = "" + ) -> List["BackyDaemon.StatusDict"]: async with self.session.get( "/v1/status", params={"filter": 
filter} ) as response: diff --git a/src/backy/conftest.py b/src/backy/conftest.py index be70ef1e..980c9448 100644 --- a/src/backy/conftest.py +++ b/src/backy/conftest.py @@ -1,6 +1,7 @@ import datetime import json import os +import random import shutil from unittest import mock from zoneinfo import ZoneInfo @@ -71,6 +72,11 @@ class Clock(object): return clock +@pytest.fixture +def seed_random(monkeypatch): + random.seed(0) + + @pytest.fixture def schedule(): schedule = backy.schedule.Schedule() diff --git a/src/backy/daemon.py b/src/backy/daemon.py index a18951eb..03eb1764 100644 --- a/src/backy/daemon.py +++ b/src/backy/daemon.py @@ -1,4 +1,5 @@ import asyncio +import datetime import fcntl import os import os.path as p @@ -6,7 +7,7 @@ import sys import time from pathlib import Path -from typing import IO, List, Optional, Pattern +from typing import IO, List, Optional, Pattern, TypedDict import aiofiles.os as aos import aioshutil @@ -14,6 +15,7 @@ from structlog.stdlib import BoundLogger from .api import BackyAPI +from .backup import Backup from .revision import filter_manual_tags from .schedule import Schedule from .scheduler import Job @@ -36,6 +38,7 @@ class BackyDaemon(object): config: dict schedules: dict[str, Schedule] jobs: dict[str, Job] + dead_backups: dict[str, Backup] backup_semaphores: dict[str, asyncio.BoundedSemaphore] log: BoundLogger @@ -51,6 +54,7 @@ def __init__(self, config_file: Path, log: BoundLogger): self.schedules = {} self.backup_semaphores = {} self.jobs = {} + self.dead_backups = {} self._lock = None self.reload_api = asyncio.Event() self.api_addrs = ["::1", "127.0.0.1"] @@ -128,6 +132,21 @@ def _apply_config(self): del self.jobs[name] self.log.info("deleted-job", job_name=name) + self.dead_backups.clear() + for b in os.scandir(self.base_dir): + if b.name in self.jobs or not b.is_dir(follow_symlinks=False): + continue + try: + self.dead_backups[b.name] = Backup( + self.base_dir / b.name, + self.log.bind(job_name=b.name), + ) + 
self.log.info("found-backup", job_name=b.name) + except Exception: + self.log.info( + "invalid-backup", job_name=b.name, exc_style="short" + ) + if ( not self.backup_semaphores or self.backup_semaphores["slow"]._bound_value != self.worker_limit # type: ignore @@ -257,9 +276,26 @@ async def shutdown_loop(self): self.log.info("stopping-loop") self.loop.stop() - def status(self, filter_re: Optional[Pattern[str]] = None) -> List[dict]: + class StatusDict(TypedDict): + job: str + sla: str + sla_overdue: int + status: str + last_time: Optional[datetime.datetime] + last_tags: Optional[str] + last_duration: Optional[float] + next_time: Optional[datetime.datetime] + next_tags: Optional[str] + manual_tags: str + quarantine_reports: int + unsynced_revs: int + local_revs: int + + def status( + self, filter_re: Optional[Pattern[str]] = None + ) -> List[StatusDict]: """Collects status information for all jobs.""" - result = [] + result: List["BackyDaemon.StatusDict"] = [] for job in list(self.jobs.values()): if filter_re and not filter_re.search(job.name): continue @@ -297,6 +333,9 @@ def status(self, filter_re: Optional[Pattern[str]] = None) -> List[dict]: manual_tags=", ".join(manual_tags), quarantine_reports=len(job.backup.quarantine.report_ids), unsynced_revs=unsynced_revs, + local_revs=len( + job.backup.get_history(clean=True, local=True) + ), ) ) return result @@ -340,14 +379,6 @@ async def purge_pending_backups(self): self.log.exception("purge-pending") await asyncio.sleep(24 * 60 * 60) - async def find_dead_backups(self) -> List[str]: - self.log.debug("scanning-backups") - return [ - b.name - for b in await aos.scandir(self.base_dir) - if await is_dir_no_symlink(b.path) and b.name not in self.jobs - ] - def main(config_file: Path, log: BoundLogger): # pragma: no cover global daemon diff --git a/src/backy/scheduler.py b/src/backy/scheduler.py index 10974c2a..eb04d5f9 100644 --- a/src/backy/scheduler.py +++ b/src/backy/scheduler.py @@ -7,16 +7,18 @@ import subprocess from 
datetime import timedelta from pathlib import Path -from typing import Optional +from typing import TYPE_CHECKING, Literal, Optional, Set import yaml +from aiohttp import ClientError from structlog.stdlib import BoundLogger -import backy.daemon import backy.utils from .backup import Backup +from .client import APIClientManager from .ext_deps import BACKY_CMD +from .schedule import Schedule from .utils import ( SafeFile, format_datetime_local, @@ -24,6 +26,9 @@ time_or_event, ) +if TYPE_CHECKING: + from backy.daemon import BackyDaemon + class Job(object): name: str @@ -36,7 +41,7 @@ class Job(object): backup: Backup logfile: Path last_config: Optional[dict] = None - daemon: "backy.daemon.BackyDaemon" + daemon: "BackyDaemon" run_immediately: asyncio.Event errors: int = 0 backoff: int = 0 @@ -45,7 +50,7 @@ class Job(object): _task: Optional[asyncio.Task] = None - def __init__(self, daemon, name, log): + def __init__(self, daemon: "BackyDaemon", name: str, log: BoundLogger): self.daemon = daemon self.name = name self.log = log.bind(job_name=name, subsystem="job") @@ -53,7 +58,7 @@ def __init__(self, daemon, name, log): self.path = self.daemon.base_dir / self.name self.logfile = self.path / "backy.log" - def configure(self, config): + def configure(self, config: dict) -> None: self.source = config["source"] self.schedule_name = config["schedule"] self.update_config() @@ -61,7 +66,7 @@ def configure(self, config): self.last_config = config @property - def spread(self): + def spread(self) -> int: seed = int(hashlib.md5(self.name.encode("utf-8")).hexdigest(), 16) limit = max(x["interval"] for x in self.schedule.schedule.values()) limit = int(limit.total_seconds()) @@ -70,7 +75,7 @@ def spread(self): return generator.randint(0, limit) @property - def sla(self): + def sla(self) -> bool: """Is the SLA currently held? The SLA being held is only reflecting the current status. 
@@ -82,11 +87,11 @@ def sla(self): return not self.sla_overdue @property - def sla_overdue(self): + def sla_overdue(self) -> int: """Amount of time the SLA is currently overdue.""" if not self.backup.clean_history: return 0 - if self.status == "running": + if self.status.startswith("running"): return 0 age = backy.utils.now() - self.backup.clean_history[-1].timestamp max_age = min(x["interval"] for x in self.schedule.schedule.values()) @@ -95,14 +100,14 @@ def sla_overdue(self): return 0 @property - def schedule(self): + def schedule(self) -> Schedule: return self.daemon.schedules[self.schedule_name] - def update_status(self, status): + def update_status(self, status: str) -> None: self.status = status self.log.debug("updating-status", status=self.status) - def update_config(self): + def update_config(self) -> None: """Writes config file for 'backy backup' subprocess.""" # We do not want to create leading directories, only @@ -119,7 +124,7 @@ def update_config(self): if config.exists() and filecmp.cmp(config, f.name): raise ValueError("not changed") - def to_dict(self): + def to_dict(self) -> dict: return { "name": self.name, "status": self.status, @@ -127,14 +132,84 @@ def to_dict(self): "schedule": self.schedule.to_dict(), } - async def _wait_for_deadline(self): + async def _wait_for_deadline(self) -> Optional[Literal[True]]: + assert self.next_time self.update_status("waiting for deadline") trigger = await time_or_event(self.next_time, self.run_immediately) self.run_immediately.clear() self.log.info("woken", trigger=trigger) return trigger - async def run_forever(self): + async def _wait_for_leader(self, next_time: datetime.datetime) -> bool: + api = None + try: + api = APIClientManager(self.daemon.peers, self.taskid, self.log) + statuses = await asyncio.gather( + *[api[server].fetch_status(f"^{self.name}$") for server in api], + return_exceptions=True, + ) + leader = None + leader_revs = len(self.backup.get_history(clean=True, local=True)) + leader_status: 
"BackyDaemon.StatusDict" + self.log.info("local-revs", local_revs=leader_revs) + for server, status in zip(api, statuses): + log = self.log.bind(server=server) + if isinstance(status, BaseException): + log.info( + "server-unavailable", exc_info=status, exc_style="short" + ) + continue + num_remote_revs = status[0]["local_revs"] + log.info("duplicate-job", remote_revs=num_remote_revs) + if num_remote_revs > leader_revs: + leader_revs = num_remote_revs + leader = server + leader_status = status[0] + + log = self.log.bind(leader=leader) + log.info("leader-found", leader_revs=leader_revs) + if not leader: + return False + + self.update_status(f"monitoring ({leader})") + res = leader_status + while True: + if ( + res["last_time"] + and (next_time - res["last_time"]).total_seconds() < 5 * 60 + ): + # there was a backup in the last 5min + log.info("leader-finished") + return True + if not res["status"]: + log.info("leader-stopped") + return False + if res["next_time"] and ( + (res["next_time"] - next_time).total_seconds() > 5 * 60 + ): + # not currently running or scheduled in the next 5min + log.info("leader-not-scheduled") + return False + + if await backy.utils.delay_or_event(300, self.run_immediately): + self.run_immediately.clear() + log.info("run-immediately-triggered") + return False + try: + res = (await api[leader].fetch_status(f"^{self.name}$"))[0] + except ClientError: + log.warning("leader-failed", exc_style="short") + return False + except asyncio.CancelledError: + raise + except Exception: + self.log.exception("_wait_for_leader-failed") + return False + finally: + if api: + await api.close() + + async def run_forever(self) -> None: """Generate backup tasks for this job. 
Tasks are based on the ideal next time in the future and @@ -178,7 +253,7 @@ async def run_forever(self): next_time=format_datetime_local(self.next_time)[0], next_tags=", ".join(next_tags), ) - await self._wait_for_deadline() + run_immediately = await self._wait_for_deadline() # The UI shouldn't show a next any longer now that we have already # triggered. @@ -186,23 +261,31 @@ async def run_forever(self): self.next_tags = None try: - speed = "slow" - if ( - self.backup.clean_history - and self.backup.clean_history[-1].stats["duration"] < 600 + self.update_status("checking neighbours") + if not run_immediately and await self._wait_for_leader( + next_time ): - speed = "fast" - self.update_status(f"waiting for worker slot ({speed})") - - async with self.daemon.backup_semaphores[speed]: - self.update_status(f"running ({speed})") - - await self.run_backup(next_tags) await self.pull_metadata() - await self.run_expiry() - await self.push_metadata() - await self.run_purge() await self.run_callback() + else: + speed = "slow" + if ( + self.backup.clean_history + and self.backup.clean_history[-1].stats["duration"] + < 600 + ): + speed = "fast" + self.update_status(f"waiting for worker slot ({speed})") + + async with self.daemon.backup_semaphores[speed]: + self.update_status(f"running ({speed})") + + await self.run_backup(next_tags) + await self.pull_metadata() + await self.run_expiry() + await self.push_metadata() + await self.run_purge() + await self.run_callback() except asyncio.CancelledError: raise except Exception: @@ -224,7 +307,7 @@ async def run_forever(self): self.backoff = 0 self.update_status("finished") - async def pull_metadata(self): + async def pull_metadata(self) -> None: self.log.info("pull-metadata-started") proc = await asyncio.create_subprocess_exec( BACKY_CMD, @@ -258,7 +341,7 @@ async def pull_metadata(self): pass raise - async def push_metadata(self): + async def push_metadata(self) -> None: self.log.info("push-metadata-started") proc = await 
asyncio.create_subprocess_exec( BACKY_CMD, @@ -292,7 +375,7 @@ async def push_metadata(self): pass raise - async def run_backup(self, tags): + async def run_backup(self, tags: Set[str]) -> None: self.log.info("backup-started", tags=", ".join(tags)) proc = await asyncio.create_subprocess_exec( BACKY_CMD, @@ -329,7 +412,7 @@ async def run_backup(self, tags): pass raise - async def run_expiry(self): + async def run_expiry(self) -> None: self.log.info("expiry-started") proc = await asyncio.create_subprocess_exec( BACKY_CMD, @@ -365,7 +448,7 @@ async def run_expiry(self): pass raise - async def run_purge(self): + async def run_purge(self) -> None: self.log.info("purge-started") proc = await asyncio.create_subprocess_exec( BACKY_CMD, @@ -397,7 +480,7 @@ async def run_purge(self): pass raise - async def run_callback(self): + async def run_callback(self) -> None: if not self.daemon.backup_completed_callback: self.log.debug("callback-not-configured") return @@ -453,17 +536,18 @@ async def run_callback(self): pass raise - def start(self): + def start(self) -> None: assert self._task is None assert self.daemon.loop self._task = self.daemon.loop.create_task( self.run_forever(), name=f"backup-loop-{self.name}" ) - def stop(self): + def stop(self) -> None: # XXX make shutdown graceful and let a previous run finish ... # schedule a reload after that. 
if self._task: self.log.info("stop") self._task.cancel() self._task = None + self.update_status("") diff --git a/src/backy/tests/test_api.py b/src/backy/tests/test_api.py index 87fa9750..537bfc36 100644 --- a/src/backy/tests/test_api.py +++ b/src/backy/tests/test_api.py @@ -1,15 +1,21 @@ import asyncio +import datetime import os import os.path as p import shutil +from functools import partial +from typing import List +from unittest.mock import Mock import pytest import yaml from aiohttp.test_utils import unused_port +import backy.utils from backy import utils from backy.daemon import BackyDaemon from backy.revision import Revision +from backy.tests import Ellipsis async def wait_api_ready(daemon): @@ -87,6 +93,7 @@ def fake_create_task(coro, *args, **kwargs): asyncio.get_running_loop(), "create_task", fake_create_task ) daemon.start(asyncio.get_running_loop()) + daemon.reload_api.set() daemon.api_server() await wait_api_ready(daemon) @@ -148,6 +155,7 @@ async def test_remove_peer(daemons, log): async def test_remove_remote_backup(daemons, log): + """delete all revs if server fails""" ds = await daemons(2) j0 = ds[0].jobs["test01"] @@ -160,18 +168,29 @@ async def test_remove_remote_backup(daemons, log): assert [r.uuid for r in b0.history] == [rev0.uuid] - del ds[1].jobs["test01"] + # pull from active job await j0.pull_metadata() b0.scan() assert [r.uuid for r in b0.history] == [rev0.uuid, rev1.uuid] + del ds[1].config["jobs"]["test01"] + ds[1]._apply_config() + + # pull from dead job + await j0.pull_metadata() + b0.scan() + assert [r.uuid for r in b0.history] == [rev0.uuid, rev1.uuid] + + # pull from dead job with missing dir shutil.rmtree(b1.path) + # works without reloading because backup.touch() will fail await j0.pull_metadata() b0.scan() assert [r.uuid for r in b0.history] == [rev0.uuid] async def test_simple_sync(daemons, log): + """pull and push changes""" ds = await daemons(3) j0 = ds[0].jobs["test01"] @@ -182,18 +201,14 @@ async def 
test_simple_sync(daemons, log): b1 = j1.backup rev1 = create_rev(b1, log) + # ignore offline servers ds[2].api_addrs = [] ds[2].reload_api.set() await wait_api_ready(ds[2]) assert [r.uuid for r in b0.history] == [rev0.uuid] - await j0.pull_metadata() - b0.scan() - - assert [r.uuid for r in b0.history] == [rev0.uuid] - - del ds[1].jobs["test01"] + # pull new rev await j0.pull_metadata() b0.scan() @@ -208,9 +223,11 @@ async def test_simple_sync(daemons, log): assert new_rev1.trust == rev1.trust assert new_rev1.server == "server-1" + # pull changed rev rev1.distrust() rev1.tags = {"manual:new"} rev1.write_info() + rev1.backup.scan() await j0.pull_metadata() b0.scan() @@ -226,11 +243,26 @@ async def test_simple_sync(daemons, log): assert new_rev1.trust == rev1.trust assert new_rev1.server == "server-1" + # mark rev for deletion new_rev1.remove() + new_rev1.backup.scan() assert [r.uuid for r in b0.history] == [rev0.uuid, rev1.uuid] assert new_rev1.tags == set() assert new_rev1.orig_tags == rev1.tags + # refuse push to active job + await j0.push_metadata() + b0.scan() + b1.scan() + + assert [r.uuid for r in b0.history] == [rev0.uuid, rev1.uuid] + assert [r.uuid for r in b1.history] == [rev1.uuid] + assert b0.history[1].pending_changes + + # accept push to dead job + del ds[1].config["jobs"]["test01"] + ds[1]._apply_config() + await j0.push_metadata() b0.scan() b1.scan() @@ -241,6 +273,9 @@ async def test_simple_sync(daemons, log): async def test_split_brain(daemons, log): + """split into 2 isolated groups with 2 severs and later allow communication + server 0 and 2 contain dead jobs + """ ds = await daemons(4) await modify_authtokens(ds, [0, 1], [2, 3], allow=False, bidirectional=True) @@ -256,12 +291,12 @@ async def test_split_brain(daemons, log): await j.pull_metadata() j.backup.scan() - for b, r in zip(bs, revs): - assert [r.uuid for r in b.history] == [r.uuid] - - del ds[0].jobs["test01"] - del ds[2].jobs["test01"] + del ds[0].config["jobs"]["test01"] + 
ds[0]._apply_config() + del ds[2].config["jobs"]["test01"] + ds[2]._apply_config() + # pull from visible servers await js[1].pull_metadata() await js[3].pull_metadata() @@ -277,6 +312,7 @@ async def test_split_brain(daemons, log): revs[3].uuid, ] + # every server can see server 0 await modify_authtokens(ds, [2, 3], [0], allow=True) await js[3].pull_metadata() @@ -288,21 +324,29 @@ async def test_split_brain(daemons, log): revs[3].uuid, ] + # create conflicting change on local copy of rev[0] bs[1].tags("add", bs[1].history[0].uuid, {"manual:new1"}) bs[3].history[0].remove() + assert bs[1].history[0].pending_changes + assert bs[3].history[0].pending_changes + # first push wins await js[1].push_metadata() await js[3].push_metadata() # fails bs[0].scan() assert bs[0].history[0].tags == {"manual:new1", "manual:a"} + # server 3 updates copy with correct data await js[1].pull_metadata() await js[3].pull_metadata() bs[1].scan() bs[3].scan() + assert not bs[1].history[0].pending_changes + assert not bs[3].history[0].pending_changes + assert [(r.uuid, r.tags) for r in bs[1].history] == [ (revs[0].uuid, {"manual:a", "manual:new1"}), (revs[1].uuid, {"manual:a"}), @@ -317,6 +361,7 @@ async def test_split_brain(daemons, log): ds, [0, 1, 2, 3], [0, 1, 2, 3], allow=True, bidirectional=True ) + # every server gets the same view await js[1].pull_metadata() await js[3].pull_metadata() @@ -327,9 +372,393 @@ async def test_split_brain(daemons, log): (revs[0].uuid, {"manual:a", "manual:new1"}), (revs[1].uuid, {"manual:a"}), (revs[2].uuid, {"manual:a"}), + (revs[3].uuid, {"manual:a"}), ] assert [(r.uuid, r.tags) for r in bs[3].history] == [ (revs[0].uuid, {"manual:a", "manual:new1"}), + (revs[1].uuid, {"manual:a"}), (revs[2].uuid, {"manual:a"}), (revs[3].uuid, {"manual:a"}), ] + + +@pytest.fixture +def jobs_dry_run(daemons, monkeypatch, clock, log, seed_random, tz_berlin): + async def f(start_delays): + async def null_coroutine(*args, delay=0.1, **kw): + await asyncio.sleep(delay) + + 
async def run_backup(job, tags, delta=datetime.timedelta()): + r = Revision.create(job.backup, tags, log) + r.timestamp = backy.utils.now() + delta + r.stats["duration"] = 1 + r.write_info() + + # This patch causes a single run through the generator loop. + def update_status(job, orig_update_status, status): + orig_update_status(status) + if status in ("finished", "failed"): + job.stop() + + orig_delay_or_event = backy.utils.delay_or_event + + async def delay_or_event(delay, event): + if delay == 300: # _wait_for_leader loop + delay = 0.1 + return await orig_delay_or_event(delay, event) + + monkeypatch.setattr(backy.utils, "delay_or_event", delay_or_event) + + ds: List[BackyDaemon] = await daemons(len(start_delays)) + jobs = [d.jobs["test01"] for d in ds] + + for job, start_delay in zip(jobs, start_delays): + monkeypatch.setattr(job, "run_expiry", null_coroutine) + monkeypatch.setattr(job, "run_purge", null_coroutine) + monkeypatch.setattr(job, "run_callback", null_coroutine) + monkeypatch.setattr(job, "run_backup", partial(run_backup, job)) + monkeypatch.setattr(job, "pull_metadata", null_coroutine) + monkeypatch.setattr(job, "push_metadata", null_coroutine) + monkeypatch.setattr( + job, + "update_status", + partial(update_status, job, job.update_status), + ) + monkeypatch.setattr( + job.schedule, + "next", + Mock( + return_value=( + backy.utils.now() + + datetime.timedelta(seconds=start_delay), + {"daily"}, + ) + ), + ) + + return jobs + + return f + + +async def test_wait_for_leader_parallel(jobs_dry_run): + """ + server 0 (leader) completes a successful backup + server 1 waits for server 0 until a revision was created + """ + + job0, job1 = await jobs_dry_run([0.5, 0.1]) + # server 0 is leader + await job0.run_backup({"daily"}, delta=datetime.timedelta(hours=-1)) + + utils.log_data = "" + + job0.start() + job1.start() + + while job0._task is not None or job1._task is not None: + await asyncio.sleep(0.1) + + assert ( + Ellipsis( + """\ +... +... 
AAAA I test01[A4WN] job/waiting [server-0] next_tags='daily' next_time='2015-09-01 09:06:47' +... AAAA D test01[A4WN] job/updating-status [server-0] status='waiting for deadline' +... +... AAAA I test01[N6PW] job/waiting [server-1] next_tags='daily' next_time='2015-09-01 09:06:47' +... AAAA D test01[N6PW] job/updating-status [server-1] status='waiting for deadline' +... AAAA I test01[N6PW] job/woken [server-1] trigger=None +... AAAA D test01[N6PW] job/updating-status [server-1] status='checking neighbours' +... +... AAAA I test01[N6PW] job/local-revs [server-1] local_revs=0 +... AAAA I test01[N6PW] job/duplicate-job [server-1] remote_revs=1 server='server-0' +... AAAA I test01[N6PW] job/leader-found [server-1] leader='server-0' leader_revs=1 +... AAAA D test01[N6PW] job/updating-status [server-1] status='monitoring (server-0)' +... +... AAAA I test01[A4WN] job/woken [server-0] trigger=None +... AAAA D test01[A4WN] job/updating-status [server-0] status='checking neighbours' +... +... AAAA I test01[A4WN] job/local-revs [server-0] local_revs=1 +... AAAA I test01[A4WN] job/duplicate-job [server-0] remote_revs=0 server='server-1' +... AAAA I test01[A4WN] job/leader-found [server-0] leader=None leader_revs=1 +... AAAA D test01[A4WN] job/updating-status [server-0] status='waiting for worker slot (fast)' +... AAAA D test01[A4WN] job/updating-status [server-0] status='running (fast)' +... AAAA D revision/writing-info revision_uuid='...' tags='daily' +... +... AAAA I test01[N6PW] job/leader-finished [server-1] leader='server-0' +... AAAA D test01[N6PW] job/updating-status [server-1] status='finished' +... +... AAAA D test01[A4WN] job/updating-status [server-0] status='finished' +... 
+""" + ) + == utils.log_data + ) + + +async def test_wait_for_leader_delayed(jobs_dry_run): + """ + server 1 will not wait for server 0 (leader) due to clock differences + """ + + job0, job1 = await jobs_dry_run([500, 0.1]) + # server 0 is leader + await job0.run_backup({"daily"}, delta=datetime.timedelta(hours=-1)) + + utils.log_data = "" + + job0.start() + job1.start() + + while job1._task is not None: + await asyncio.sleep(0.1) + + job0.stop() + + assert ( + Ellipsis( + """\ +... +... AAAA I test01[A4WN] job/waiting [server-0] next_tags='daily' next_time='2015-09-01 09:15:07' +... AAAA D test01[A4WN] job/updating-status [server-0] status='waiting for deadline' +... +... AAAA I test01[N6PW] job/waiting [server-1] next_tags='daily' next_time='2015-09-01 09:06:47' +... AAAA D test01[N6PW] job/updating-status [server-1] status='waiting for deadline' +... AAAA I test01[N6PW] job/woken [server-1] trigger=None +... AAAA D test01[N6PW] job/updating-status [server-1] status='checking neighbours' +... +... AAAA I test01[N6PW] job/local-revs [server-1] local_revs=0 +... AAAA I test01[N6PW] job/duplicate-job [server-1] remote_revs=1 server='server-0' +... AAAA I test01[N6PW] job/leader-found [server-1] leader='server-0' leader_revs=1 +... AAAA D test01[N6PW] job/updating-status [server-1] status='monitoring (server-0)' +... AAAA I test01[N6PW] job/leader-not-scheduled [server-1] leader='server-0' +... AAAA D test01[N6PW] job/updating-status [server-1] status='waiting for worker slot (slow)' +... AAAA D test01[N6PW] job/updating-status [server-1] status='running (slow)' +... AAAA D revision/writing-info revision_uuid='...' tags='daily' +... AAAA D test01[N6PW] job/updating-status [server-1] status='finished' +... 
+""" + ) + == utils.log_data + ) + + +async def test_wait_for_leader_crash(jobs_dry_run, monkeypatch): + """ + server 1 will wait for server 0 (leader) until it becomes unreachable and then create a backup itself + server 2 (offline) will be ignored + """ + + job0, job1, job2 = await jobs_dry_run([0.5, 0.1, 0]) + # server 0 is leader + await job0.run_backup({"daily"}, delta=datetime.timedelta(hours=-1)) + + job2.daemon.api_addrs = [] + job2.daemon.reload_api.set() + await wait_api_ready(job2.daemon) + + async def crash(*args, **kw): + # api_addrs does not work here because the connection is already established + job0.daemon.api_tokens = {} + job0.daemon.reload_api.set() + + monkeypatch.setattr(job0, "run_backup", crash) + + utils.log_data = "" + + job0.start() + job1.start() + + while job1._task is not None: + await asyncio.sleep(0.1) + + job0.stop() + + assert ( + Ellipsis( + """\ +... +... AAAA I test01[A4WN] job/waiting [server-0] next_tags='daily' next_time='2015-09-01 09:06:47' +... AAAA D test01[A4WN] job/updating-status [server-0] status='waiting for deadline' +... +... AAAA I test01[N6PW] job/waiting [server-1] next_tags='daily' next_time='2015-09-01 09:06:47' +... AAAA D test01[N6PW] job/updating-status [server-1] status='waiting for deadline' +... AAAA I test01[N6PW] job/woken [server-1] trigger=None +... AAAA D test01[N6PW] job/updating-status [server-1] status='checking neighbours' +... +... AAAA I test01[N6PW] job/local-revs [server-1] local_revs=0 +... AAAA I test01[N6PW] job/duplicate-job [server-1] remote_revs=1 server='server-0' +... AAAA I test01[N6PW] job/server-unavailable [server-1] exception_class='aiohttp.client_exceptions.ClientConnectorError' exception_msg="Cannot connect to host ... ssl:default [Connect call failed (...)]" server='server-2' +... AAAA I test01[N6PW] job/leader-found [server-1] leader='server-0' leader_revs=1 +... AAAA D test01[N6PW] job/updating-status [server-1] status='monitoring (server-0)' +... +... 
AAAA I test01[A4WN] job/local-revs [server-0] local_revs=1 +... AAAA I test01[A4WN] job/duplicate-job [server-0] remote_revs=0 server='server-1' +... AAAA I test01[A4WN] job/server-unavailable [server-0] exception_class='aiohttp.client_exceptions.ClientConnectorError' exception_msg="Cannot connect to host ... ssl:default [Connect call failed (...)]" server='server-2' +... AAAA I test01[A4WN] job/leader-found [server-0] leader=None leader_revs=1 +... AAAA D test01[A4WN] job/updating-status [server-0] status='waiting for worker slot (fast)' +... AAAA D test01[A4WN] job/updating-status [server-0] status='running (fast)' +... AAAA I daemon/api-reconfigure [server-0] \n\ +... +... AAAA W test01[N6PW] job/leader-failed [server-1] exception_class='aiohttp.client_exceptions.ClientResponseError' exception_msg="401, message='Unauthorized', url=URL('...')" leader='server-0' +... AAAA D test01[N6PW] job/updating-status [server-1] status='waiting for worker slot (slow)' +... AAAA D test01[N6PW] job/updating-status [server-1] status='running (slow)' +... AAAA D revision/writing-info revision_uuid='...' tags='daily' +... AAAA D test01[A4WN] job/updating-status [server-0] status='finished' +... +... AAAA D test01[N6PW] job/updating-status [server-1] status='finished' +... +""" + ) + == utils.log_data + ) + + +async def test_wait_for_leader_stopped(jobs_dry_run): + """ + server 1 will ignore server 0 (leader, with a stopped job) + """ + + job0, job1 = await jobs_dry_run([0.5, 0.1]) + # server 0 is leader + await job0.run_backup({"daily"}, delta=datetime.timedelta(hours=-1)) + + utils.log_data = "" + + job1.start() + + while job1._task is not None: + await asyncio.sleep(0.1) + + assert ( + Ellipsis( + """\ +... +... AAAA I test01[A4WN] job/waiting [server-1] next_tags='daily' next_time='2015-09-01 09:06:47' +... AAAA D test01[A4WN] job/updating-status [server-1] status='waiting for deadline' +... AAAA I test01[A4WN] job/woken [server-1] trigger=None +... 
AAAA D test01[A4WN] job/updating-status [server-1] status='checking neighbours' +... +... AAAA I test01[A4WN] job/local-revs [server-1] local_revs=0 +... AAAA I test01[A4WN] job/duplicate-job [server-1] remote_revs=1 server='server-0' +... AAAA I test01[A4WN] job/leader-found [server-1] leader='server-0' leader_revs=1 +... AAAA D test01[A4WN] job/updating-status [server-1] status='monitoring (server-0)' +... AAAA I test01[A4WN] job/leader-stopped [server-1] leader='server-0' +... AAAA D test01[A4WN] job/updating-status [server-1] status='waiting for worker slot (slow)' +... AAAA D test01[A4WN] job/updating-status [server-1] status='running (slow)' +... AAAA D revision/writing-info revision_uuid='...' tags='daily' +... AAAA D test01[A4WN] job/updating-status [server-1] status='finished' +... +""" + ) + == utils.log_data + ) + + +async def test_wait_for_leader_ambiguous_leader(jobs_dry_run, monkeypatch): + """ + server 0 and server 1 have the same amount of revisions and will both create a new one + """ + + job0, job1 = await jobs_dry_run([0.7, 0.0]) + + async def noop(*args, **kw): + # do not create a rev + pass + + monkeypatch.setattr(job1, "run_backup", noop) + + utils.log_data = "" + + job0.start() + job1.start() + + while job0._task is not None or job1._task is not None: + await asyncio.sleep(0.1) + + assert ( + Ellipsis( + """\ +... +... AAAA I test01[A4WN] job/waiting [server-0] next_tags='daily' next_time='2015-09-01 09:06:47' +... AAAA D test01[A4WN] job/updating-status [server-0] status='waiting for deadline' +... +... AAAA I test01[N6PW] job/waiting [server-1] next_tags='daily' next_time='2015-09-01 09:06:47' +... AAAA D test01[N6PW] job/updating-status [server-1] status='waiting for deadline' +... AAAA I test01[N6PW] job/woken [server-1] trigger=None +... AAAA D test01[N6PW] job/updating-status [server-1] status='checking neighbours' +... +... AAAA I test01[N6PW] job/local-revs [server-1] local_revs=0 +... 
AAAA I test01[N6PW] job/duplicate-job [server-1] remote_revs=0 server='server-0' +... AAAA I test01[N6PW] job/leader-found [server-1] leader=None leader_revs=0 +... AAAA D test01[N6PW] job/updating-status [server-1] status='waiting for worker slot (slow)' +... AAAA D test01[N6PW] job/updating-status [server-1] status='running (slow)' +... AAAA D test01[N6PW] job/updating-status [server-1] status='finished' +... +... AAAA I test01[A4WN] job/woken [server-0] trigger=None +... AAAA D test01[A4WN] job/updating-status [server-0] status='checking neighbours' +... +... AAAA I test01[A4WN] job/local-revs [server-0] local_revs=0 +... AAAA I test01[A4WN] job/duplicate-job [server-0] remote_revs=0 server='server-1' +... AAAA I test01[A4WN] job/leader-found [server-0] leader=None leader_revs=0 +... AAAA D test01[A4WN] job/updating-status [server-0] status='waiting for worker slot (slow)' +... AAAA D test01[A4WN] job/updating-status [server-0] status='running (slow)' +... AAAA D revision/writing-info revision_uuid='...' tags='daily' +... AAAA D test01[A4WN] job/updating-status [server-0] status='finished' +... +""" + ) + == utils.log_data + ) + + +async def test_wait_for_leader_run_immediately(jobs_dry_run): + """ + sever 1 will stop waiting on signal + """ + + job0, job1 = await jobs_dry_run([10, 0.1]) + # server 0 is leader + await job0.run_backup({"daily"}, delta=datetime.timedelta(hours=-1)) + + utils.log_data = "" + + job0.start() + job1.start() + + await asyncio.sleep(0.5) + job1.run_immediately.set() + + while job1._task is not None: + await asyncio.sleep(0.1) + + job0.stop() + + assert ( + Ellipsis( + """\ +... +... AAAA I test01[A4WN] job/waiting [server-0] next_tags='daily' next_time='2015-09-01 09:06:57' +... AAAA D test01[A4WN] job/updating-status [server-0] status='waiting for deadline' +... +... AAAA I test01[N6PW] job/waiting [server-1] next_tags='daily' next_time='2015-09-01 09:06:47' +... 
AAAA D test01[N6PW] job/updating-status [server-1] status='waiting for deadline' +... AAAA I test01[N6PW] job/woken [server-1] trigger=None +... AAAA D test01[N6PW] job/updating-status [server-1] status='checking neighbours' +... +... AAAA I test01[N6PW] job/local-revs [server-1] local_revs=0 +... AAAA I test01[N6PW] job/duplicate-job [server-1] remote_revs=1 server='server-0' +... AAAA I test01[N6PW] job/leader-found [server-1] leader='server-0' leader_revs=1 +... AAAA D test01[N6PW] job/updating-status [server-1] status='monitoring (server-0)' +... +... AAAA I test01[N6PW] job/run-immediately-triggered [server-1] leader='server-0' +... AAAA D test01[N6PW] job/updating-status [server-1] status='waiting for worker slot (slow)' +... AAAA D test01[N6PW] job/updating-status [server-1] status='running (slow)' +... +... AAAA D test01[N6PW] job/updating-status [server-1] status='finished' +... +""" + ) + == utils.log_data + ) diff --git a/src/backy/tests/test_daemon.py b/src/backy/tests/test_daemon.py index 32e1a98a..b0a5af84 100644 --- a/src/backy/tests/test_daemon.py +++ b/src/backy/tests/test_daemon.py @@ -1,5 +1,6 @@ import asyncio import datetime +import json import os import re import signal @@ -56,6 +57,17 @@ async def daemon(tmp_path, monkeypatch, log): f.write("I am your father, Luke!") tmp_path.joinpath("dead01").mkdir() + with open(tmp_path / "dead01" / "config", "w") as f: + json.dump( + { + "schedule": {}, + "source": { + "type": "file", + "filename": str(tmp_path / "config"), + }, + }, + f, + ) async def null_coroutine(): return @@ -304,6 +316,9 @@ async def test_task_generator_backoff( async def null_coroutine(): await asyncio.sleep(0.1) + async def false_coroutine(*args, **kw): + return False + failures = [1, 1, 1] async def failing_coroutine(*args, **kw): @@ -321,8 +336,9 @@ async def failing_coroutine(*args, **kw): monkeypatch.setattr(job, "run_purge", null_coroutine) monkeypatch.setattr(job, "run_callback", null_coroutine) monkeypatch.setattr(job, 
"run_backup", failing_coroutine) - monkeypatch.setattr(job, "pull_metadata", failing_coroutine) - monkeypatch.setattr(job, "push_metadata", failing_coroutine) + monkeypatch.setattr(job, "pull_metadata", null_coroutine) + monkeypatch.setattr(job, "push_metadata", null_coroutine) + monkeypatch.setattr(job, "_wait_for_leader", false_coroutine) # This patch causes a single run through the generator loop. def update_status(status): diff --git a/src/backy/tests/test_scheduler.py b/src/backy/tests/test_scheduler.py index 279d75fd..987ddef3 100644 --- a/src/backy/tests/test_scheduler.py +++ b/src/backy/tests/test_scheduler.py @@ -17,7 +17,7 @@ def daemon(tmp_path): async def test_wait_for_deadline_no_deadline_fails(daemon, log): job = Job(daemon, "dummy", log) # Not having a deadline set causes this to fail (immediately) - with pytest.raises(TypeError): + with pytest.raises(AssertionError): await job._wait_for_deadline() diff --git a/src/backy/utils.py b/src/backy/utils.py index e6c58be3..90357ac9 100644 --- a/src/backy/utils.py +++ b/src/backy/utils.py @@ -11,8 +11,9 @@ import tempfile import time import typing +from asyncio import Event from os import DirEntry -from typing import IO, Callable, Iterable, List, Optional, TypeVar +from typing import IO, Callable, Iterable, List, Literal, Optional, TypeVar from zoneinfo import ZoneInfo import aiofiles.os as aos @@ -442,7 +443,7 @@ def min_date(): return datetime.datetime.min.replace(tzinfo=ZoneInfo("UTC")) -async def is_dir_no_symlink(p): +async def is_dir_no_symlink(p: str | os.PathLike) -> bool: return await aos.path.isdir(p) and not await aos.path.islink(p) @@ -472,13 +473,19 @@ async def has_recent_changes(path: str, reference_time: float) -> bool: return False -async def time_or_event(deadline, event): - remaining_time = (deadline - now()).total_seconds() +async def delay_or_event(delay: float, event: Event) -> Optional[Literal[True]]: return await next( - asyncio.as_completed([asyncio.sleep(remaining_time), 
event.wait()]) + asyncio.as_completed([asyncio.sleep(delay), event.wait()]) ) +async def time_or_event( + deadline: datetime.datetime, event: Event +) -> Optional[Literal[True]]: + remaining_time = (deadline - now()).total_seconds() + return await delay_or_event(remaining_time, event) + + def format_datetime_local(dt): tz = tzlocal.get_localzone() if dt is None: From 6ccad7522c5523ba4ef86f326ee56ccae9af1cfe Mon Sep 17 00:00:00 2001 From: Johann Bahl Date: Thu, 11 Apr 2024 13:32:18 +0200 Subject: [PATCH 13/13] update dependencies --- lib.nix | 10 +- poetry.lock | 1219 ++++++++++++++++++++++++++++----------------------- 2 files changed, 681 insertions(+), 548 deletions(-) diff --git a/lib.nix b/lib.nix index 27c994af..bb27f872 100644 --- a/lib.nix +++ b/lib.nix @@ -23,6 +23,12 @@ let scriv = super.scriv.overrideAttrs (old: { buildInputs = (old.buildInputs or []) ++ [ super.setuptools ]; }); + backports-tarfile = super.backports-tarfile.overrideAttrs (old: { + buildInputs = (old.buildInputs or []) ++ [ super.setuptools ]; + }); + docutils = super.docutils.overrideAttrs (old: { + buildInputs = (old.buildInputs or []) ++ [ super.flit-core ]; + }); execnet = super.execnet.overrideAttrs (old: { buildInputs = (old.buildInputs or []) ++ [ super.hatchling super.hatch-vcs ]; }); @@ -54,7 +60,7 @@ let nh3 = let getCargoHash = version: { - "0.2.15" = "sha256-fetAE3cj9hh4SoPE72Bqco5ytUMiDqbazeS2MHdUibM="; + "0.2.17" = "sha256-WomlVzKOUfcgAWGJInSvZn9hm+bFpgc4nJbRiyPCU64="; }.${version} or ( lib.warn "Unknown nh3 version: '${version}'. Please update getCargoHash." lib.fakeHash ); @@ -78,7 +84,7 @@ let cryptography = let getCargoHash = version: { - "41.0.7" = "sha256-VeZhKisCPDRvmSjGNwCgJJeVj65BZ0Ge+yvXbZw86Rw"; + "42.0.5" = "sha256-Pw3ftpcDMfZr/w6US5fnnyPVsFSB9+BuIKazDocYjTU="; }.${version} or ( lib.warn "Unknown cryptography version: '${version}'. Please update getCargoHash." 
lib.fakeHash ); diff --git a/poetry.lock b/poetry.lock index a29ce684..22647f92 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,87 +13,87 @@ files = [ [[package]] name = "aiohttp" -version = "3.9.1" +version = "3.9.3" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"}, - {file = 
"aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"}, - {file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"}, - {file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"}, - {file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"}, - {file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"}, - {file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"}, - {file = "aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8"}, - {file = "aiohttp-3.9.1-cp38-cp38-win32.whl", hash = "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4"}, - {file = "aiohttp-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c"}, - {file = "aiohttp-3.9.1-cp39-cp39-win32.whl", hash = "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7"}, - {file = "aiohttp-3.9.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf"}, - {file = "aiohttp-3.9.1.tar.gz", hash = "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, + {file = 
"aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, + {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, + {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, + {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, + {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, + {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, + {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, + {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, + {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, + {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, + {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, + {file = "aiohttp-3.9.3.tar.gz", hash = 
"sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, ] [package.dependencies] @@ -162,38 +162,55 @@ tests = ["attrs[tests-no-zope]", "zope-interface"] tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +[[package]] +name = "backports-tarfile" +version = "1.0.0" +description = "Backport of CPython tarfile module" +optional = false +python-versions = ">=3.8" +files = [ + {file = "backports.tarfile-1.0.0-py3-none-any.whl", hash = "sha256:bcd36290d9684beb524d3fe74f4a2db056824c47746583f090b8e55daf0776e4"}, + {file = "backports.tarfile-1.0.0.tar.gz", hash = "sha256:2688f159c21afd56a07b75f01306f9f52c79aebcc5f4a117fb8fbb4445352c75"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"] + [[package]] name = "build" -version = "1.0.3" +version = "1.2.1" description = "A simple, correct Python build frontend" optional = false -python-versions = ">= 3.7" +python-versions = ">=3.8" files = [ - {file = "build-1.0.3-py3-none-any.whl", hash = "sha256:589bf99a67df7c9cf07ec0ac0e5e2ea5d4b37ac63301c4986d1acb126aa83f8f"}, - {file = "build-1.0.3.tar.gz", hash = "sha256:538aab1b64f9828977f84bc63ae570b060a8ed1be419e7870b8b4fc5e6ea553b"}, + {file = "build-1.2.1-py3-none-any.whl", hash = "sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4"}, + {file = "build-1.2.1.tar.gz", hash = "sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d"}, ] [package.dependencies] colorama = {version = "*", markers = "os_name == \"nt\""} -packaging = ">=19.0" +importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""} +packaging = ">=19.1" pyproject_hooks = "*" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 
[package.extras] docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] -test = ["filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] -typing = ["importlib-metadata (>=5.1)", "mypy (>=1.5.0,<1.6.0)", "tomli", "typing-extensions (>=3.7.4.3)"] +test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] +typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] +uv = ["uv (>=0.1.18)"] virtualenv = ["virtualenv (>=20.0.35)"] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -400,70 +417,80 @@ click = "*" [[package]] name = "cmarkgfm" -version = "2022.10.27" +version = "2024.1.14" description = "Minimal bindings to GitHub's fork of cmark" optional = false python-versions = "*" files = [ - {file = "cmarkgfm-2022.10.27-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6a3970cf1c8ba4465d5046dd6a6d7f6024e67d6eec812a4701a21c5161a2fbd"}, - {file = "cmarkgfm-2022.10.27-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4325b75a3b5b802d5edcc2378aa6405a1e5df0aeeec583d1b05d73b0562fa7d0"}, - {file = "cmarkgfm-2022.10.27-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:216a540e85258839cffa7274731a87d91b3e17c9079b3b02467c312e784b5281"}, - {file = "cmarkgfm-2022.10.27-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0756ea0f6b55eff2617ea0518d6730e37d6077c10baaabbe8b46210ff5a250ef"}, - {file = "cmarkgfm-2022.10.27-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a91279ab8e2869c19120595e41ebd81a6f5034c1e6b1cfc5e81cd80d40bf3eb"}, - {file = "cmarkgfm-2022.10.27-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:69a769feb1b2d16982fe952afd44e124a4d306a44cdfd6857e74b8eb5d47d765"}, - {file = "cmarkgfm-2022.10.27-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:21557c06a411b1d754eed7f6fc9a8ff41f8a4a004b32c8bd2cec2ab3f3cb4d3c"}, - {file = 
"cmarkgfm-2022.10.27-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e9f038a4f0e54c135e468994f1ea97141b086d1f1bd8f498c12f3d559017e8e"}, - {file = "cmarkgfm-2022.10.27-cp310-cp310-win32.whl", hash = "sha256:5fc7178a6afd69a5dfc197558791cecedead9fc77e95ec63c201e8219ce33000"}, - {file = "cmarkgfm-2022.10.27-cp310-cp310-win_amd64.whl", hash = "sha256:f17677e66f95f25999c959c3f5361c05e739ad4f6b70ab9fdd24b1734c3ab029"}, - {file = "cmarkgfm-2022.10.27-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:670b414274edf3ecc0a950a80580e1de553c599a30658827a5d7f7bccbde5843"}, - {file = "cmarkgfm-2022.10.27-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8830dfb61251f2b677dea7ffc531c3f6037f7e9a66a14ad24bdaf3cefe2dc8c4"}, - {file = "cmarkgfm-2022.10.27-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8daf62cddc81b31a8f3c9093936c4cb75b25a8024c09f276cb027f1647e3326"}, - {file = "cmarkgfm-2022.10.27-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210c0f0dbc1aadab30bc75c48b14b645414733a668df52b43058028e43a046e8"}, - {file = "cmarkgfm-2022.10.27-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd6315e1036d31884bff25719636e3499a7f4593b0f7b47dc742678328f2f26f"}, - {file = "cmarkgfm-2022.10.27-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b0b13eac6194d59f9d3ab44af7076221510e788572f34e25104ad47b33d960e1"}, - {file = "cmarkgfm-2022.10.27-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76beb5b50b32d7bafec2154608a037601a2186d15df95cec6ab4cc937afca365"}, - {file = "cmarkgfm-2022.10.27-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d3fd62dd65c3a64ced175a1447ea41b01a7ac1c0df1c8358323267c9326b7745"}, - {file = "cmarkgfm-2022.10.27-cp311-cp311-win32.whl", hash = "sha256:1790164f84e6b037d0b39df11f757e021a9f9c313681297a051d50bc7b5249fc"}, - {file = "cmarkgfm-2022.10.27-cp311-cp311-win_amd64.whl", hash = 
"sha256:799cf03a82a7849d975a3b955798d5e439a08fb678b657c5078115dc61314674"}, - {file = "cmarkgfm-2022.10.27-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ca0e03a590c6f62738d208f8689da08eae9d3bcc2f4dd97e38df45d8dbc333ab"}, - {file = "cmarkgfm-2022.10.27-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2d3bdb7e525abd03366a57eabd03e0c3f3f36bbf8af2267200605b7b712763b"}, - {file = "cmarkgfm-2022.10.27-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e267ce890b579585a32f77d347d61de2390b517cfc52bb4ca67c5c4b4c055a"}, - {file = "cmarkgfm-2022.10.27-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f938c503fce528d9cb715314134f8900cf09ddbd7e2bea88cf54a4bad58d0d5b"}, - {file = "cmarkgfm-2022.10.27-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c82af8cdb76a71459662e447f9b1545ae6146cb9287df978705a298f87a76a90"}, - {file = "cmarkgfm-2022.10.27-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:89dcd4fea4ae44f1a0697cf805b6931a126b2b3ea23ed1ccdad7e020425224a9"}, - {file = "cmarkgfm-2022.10.27-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ddc2bbb5572722758787066f5f841745c58452e28c59ce7c13b7228be1cb48f3"}, - {file = "cmarkgfm-2022.10.27-cp36-cp36m-win32.whl", hash = "sha256:ccfc25b5abfe1398426f099d840b5fa7dec118b44f06833e2ba8b67c6ffc12d9"}, - {file = "cmarkgfm-2022.10.27-cp36-cp36m-win_amd64.whl", hash = "sha256:fbec94c3e91b5e03d90a2cc2e865179e5bc58673e92b03ba64b520a97a0e9219"}, - {file = "cmarkgfm-2022.10.27-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0023de4b19bb557b143bed274f76cb36551f7f1d1cdffd29b6cde646b85d9ffb"}, - {file = "cmarkgfm-2022.10.27-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6672784820981d315b695bb7ce08d40886502368e133b453d675ff6f2fffae49"}, - {file = "cmarkgfm-2022.10.27-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:07a06d424ccef98528cba1158946f92117e07579f1dc9942ed4fd70f81693b9f"}, - {file = "cmarkgfm-2022.10.27-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a6e597bdf595f81dc214e821b579b8d665116c55ed5288b599ae941e446098"}, - {file = "cmarkgfm-2022.10.27-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:071f5f0dac9475bab6a065878f248a69be52a7736b6c661e06ca7199f25fe097"}, - {file = "cmarkgfm-2022.10.27-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0176d51fb57162c642b1d2c70048950a5ae119af81e77565a0383b992b1f86d6"}, - {file = "cmarkgfm-2022.10.27-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8744be702511464d04c34000005009607471f1afe65d6037777747d6b4607e5f"}, - {file = "cmarkgfm-2022.10.27-cp37-cp37m-win32.whl", hash = "sha256:483e48613f5c7b3350cdabfd0f69aaa086513542d0de533f39e5669bf4df5de4"}, - {file = "cmarkgfm-2022.10.27-cp37-cp37m-win_amd64.whl", hash = "sha256:123ad8d50fbedacd036760ba46e36170bad9dd2c1e83655d8622b7803169bb49"}, - {file = "cmarkgfm-2022.10.27-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ea7d6cb95e2d74049cf08fde4ca6cbf030b9bf9ef75009847bbefb35094bb4c2"}, - {file = "cmarkgfm-2022.10.27-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98c0527153daf16589ef095aa72f06a4bdb9213433ff47811fbc4172c91d865b"}, - {file = "cmarkgfm-2022.10.27-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfe84b8912b355b8036c093ecdd6abbe6df075176879a49867dd72b9e53449f3"}, - {file = "cmarkgfm-2022.10.27-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e897160be161161a565df94ce502714a1aa63af3ad682e6d1f1c7e6656fdbb"}, - {file = "cmarkgfm-2022.10.27-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bad39b832f734f588aea00868e53ba1aaf058d569e40e5c9016702edebf88e8"}, - {file = "cmarkgfm-2022.10.27-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:80cf50b52bc0a47c032706de27b9526b6035c73b57ce06662021144cba4b6c5e"}, - {file = "cmarkgfm-2022.10.27-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ae1b4b2c6b92f8f5b1e5416a2f5b1bba7a5f9aea29b0de79767ed80655527a"}, - {file = "cmarkgfm-2022.10.27-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ea8a84d3702ccc32f8dfd0917dfb95f3d1843a0b6f85131c5cbfd1480d1d31ee"}, - {file = "cmarkgfm-2022.10.27-cp38-cp38-win32.whl", hash = "sha256:c66077349e7f7d954aa37d770310de5a8214ac9dca9756440f99e008a0e693de"}, - {file = "cmarkgfm-2022.10.27-cp38-cp38-win_amd64.whl", hash = "sha256:cc70b89309404dd84a524d439aa2b2e54872e0f623f9523bd77e66526251954f"}, - {file = "cmarkgfm-2022.10.27-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5a39333e1fdcd0116c24adc33423999913865bd3cc83fc44b2218aac7fbe5637"}, - {file = "cmarkgfm-2022.10.27-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5342c6d12e343cc66b4b8dcd09fc0c1977cb32fd1d57c15bd756876606591ee9"}, - {file = "cmarkgfm-2022.10.27-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c804446b941dc08dcc3d2def3913cfc4bae954b80babfaa2a502e8ebdea29185"}, - {file = "cmarkgfm-2022.10.27-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c04921575e412a6459d645a45ca987061b17d89310c92aedf108f97f2b8b7b91"}, - {file = "cmarkgfm-2022.10.27-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1013ce61db1dd3febcaca1ee42cad9eb823852bb76cbae61c1488734ce51f2b7"}, - {file = "cmarkgfm-2022.10.27-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:db3449fdb87752be5ad0698d6f2ca030af320cdf71ebc9a1ebae1b9c1d3661c8"}, - {file = "cmarkgfm-2022.10.27-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:325c03644da5ab81a7071aae6fbafa3beb22413f7fd7440baf6d510cfcf7be21"}, - {file = "cmarkgfm-2022.10.27-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:27149c63b1190ee6e7dd4b32d0a2c313bc1856bcdde7a42a0a5b6ae42d97ed94"}, - {file = 
"cmarkgfm-2022.10.27-cp39-cp39-win32.whl", hash = "sha256:e65e492407d7cb3b695f3f715a1cbe6f97db69eb14011b8f156fc10c758b55c7"}, - {file = "cmarkgfm-2022.10.27-cp39-cp39-win_amd64.whl", hash = "sha256:3f510fafa9d904336eecc3aa41536fd287c2d32baa21b14d48950ced802ca531"}, - {file = "cmarkgfm-2022.10.27.tar.gz", hash = "sha256:93d9ac7716ea901ca0bfd18ae3b68f1f6bf51de0830c3f233ef734fcd52a0799"}, + {file = "cmarkgfm-2024.1.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b676300ff04629c07c02687e5dc017fb7d1edf38a5dc1ab2ec28078a71195ce6"}, + {file = "cmarkgfm-2024.1.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7ccd6689108673be5bb19f3a3b02cf897fab9b04290a0548e4c6deec5a1fec9a"}, + {file = "cmarkgfm-2024.1.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8d68eb075d2eb48e864d9c031b0117900e25a3c1c7ab8ce4f80c25b6ae7f346"}, + {file = "cmarkgfm-2024.1.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82b62dbc7f546c43d99964914133b7e4d7ef7ce4f411cd0acc95b382853aa47f"}, + {file = "cmarkgfm-2024.1.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3fe7e5357233649030f22dd816354e51c99f90857cff904a173bbd602733aca"}, + {file = "cmarkgfm-2024.1.14-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4310b7d5cc534c27454406963319a16c7f3f05ff75b7cd24a1091884c04248ad"}, + {file = "cmarkgfm-2024.1.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4ecb19b2fb24fe055bbaf0d38e8262642f27123b4339f1f10e7fabb726e57ee8"}, + {file = "cmarkgfm-2024.1.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7d3f497cbbaeb540a741a6516673045abcc11b41029bc3dbc95b3b5ea2cc2182"}, + {file = "cmarkgfm-2024.1.14-cp310-cp310-win32.whl", hash = "sha256:d399545833ff3f25f8f6d9b559be5878987f3e34cfc4fe79cefc1d33fd3da852"}, + {file = "cmarkgfm-2024.1.14-cp310-cp310-win_amd64.whl", hash = "sha256:bcd059584161491a2848e55ec73c28f17d8e89faec9ad860afa05246a6ee695f"}, + {file = 
"cmarkgfm-2024.1.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6565d5659d80d119733a407af70e5de7c04e0822fabb91db83688e0e280cc174"}, + {file = "cmarkgfm-2024.1.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:43db622d57e81dce30dcbea3a0198588080559473e5bafb68e10959dbf0a3d0f"}, + {file = "cmarkgfm-2024.1.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26d39c10b816392a3b54a9a7ba7ce894c40fee2813ac71554c424d6762c28ee6"}, + {file = "cmarkgfm-2024.1.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bba56ccec5b980c9da003ac7449c0cbe8045e4e046d4ce51273a47d3a9b84b1a"}, + {file = "cmarkgfm-2024.1.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c552ef80fe8c29d647c86ab49b8f66aa7626e7de9009e4005bd2120698667fa2"}, + {file = "cmarkgfm-2024.1.14-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ce0f76ea3d47d206913db8f7933076191be4d8871019095a4880885531586ab5"}, + {file = "cmarkgfm-2024.1.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b0b5a3be0b76f2802f8f16c40d33f74ff68c0bca8860bd4b5252395181077534"}, + {file = "cmarkgfm-2024.1.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:70db301d140d1f0cabe58770aad8506a4bd7fa0e5b0fae78c8f9a80712d14857"}, + {file = "cmarkgfm-2024.1.14-cp311-cp311-win32.whl", hash = "sha256:3cdc34d749601ff74209580fb7c80b3cbf1112d2832af52c14387cb04831ff2b"}, + {file = "cmarkgfm-2024.1.14-cp311-cp311-win_amd64.whl", hash = "sha256:8b8fd26ff27b9895f48459b8e556b9d6c4d255ac3735b3b2f8b14b9787ff6b89"}, + {file = "cmarkgfm-2024.1.14-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb595be70d86d1497826f80e3e04b46001b2b94bccc099619daea693923a88b3"}, + {file = "cmarkgfm-2024.1.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a8c929836caf96c4fb7fd5e7f9efe6fbb6aa57f9dfae543f74f10b55d5a92b56"}, + {file = "cmarkgfm-2024.1.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:07b83ae8de9a397629b4dbe4f190e8edc87b2b94548fbc644ed006a5cc94d959"}, + {file = "cmarkgfm-2024.1.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a922846197ac9452cca8aaa9c4b5e56b215299d067e704df9c56c8d87463335"}, + {file = "cmarkgfm-2024.1.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53ccf3d184080f8626264f49866df4ead90685f4d5a3d5d01ffba7f50fd15512"}, + {file = "cmarkgfm-2024.1.14-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a9bd82bac3673a0848fa09512ae61059943462ce824944faaeadc727068d9e19"}, + {file = "cmarkgfm-2024.1.14-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1f530595c5fbd92c248576ce43771bd07289c1d42623422d669e20ddb545d41b"}, + {file = "cmarkgfm-2024.1.14-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2196871b8f42dea9d85e1a3c1bcb5e05758946874441ec3f7226d80881284e32"}, + {file = "cmarkgfm-2024.1.14-cp312-cp312-win32.whl", hash = "sha256:7de3d8bfc64168419a6d1edb223616ac6f54b67e20e4b6b70f3ae659fd3f3f31"}, + {file = "cmarkgfm-2024.1.14-cp312-cp312-win_amd64.whl", hash = "sha256:ded64f663955f852d8965b40ab47718461df6d42d12719264b09a23d33b8d38f"}, + {file = "cmarkgfm-2024.1.14-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3340c8867ee9a3d2590eb020f22cdc4e101d3d4d9f4a8cc95964f45666ed58f0"}, + {file = "cmarkgfm-2024.1.14-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc6fb79ef6f93dd497fd2b85be032636bb955a762c42f52ed65fefe32525fc2b"}, + {file = "cmarkgfm-2024.1.14-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890a235fc0cf5c821ed4dd81a5405a526e9cc616761345aee54840a575b7f969"}, + {file = "cmarkgfm-2024.1.14-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa2e85a1e0f3929015d5fe023bd3664d8c744d3f898bdd05df99ed905782376"}, + {file = "cmarkgfm-2024.1.14-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = 
"sha256:283de0cf88c784f71e8416cf40fffc133d6ffd9686dbf094c4b5c522e1283343"}, + {file = "cmarkgfm-2024.1.14-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:2bc2dc101dd6b27aaa264f4c3e8eb444b850b06c60dc4cd278ea743fb085c35b"}, + {file = "cmarkgfm-2024.1.14-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:494daf9790dd1a5440bbe3f5c70e01afbeeb3b7565d373b05b3bfa311a0ac3e3"}, + {file = "cmarkgfm-2024.1.14-cp36-cp36m-win32.whl", hash = "sha256:e51707471196d199f03aff1c5e12860d12a44e7bf29302e7bb485074f8e62ff0"}, + {file = "cmarkgfm-2024.1.14-cp36-cp36m-win_amd64.whl", hash = "sha256:bee28a3cc0abae18a46119ff1cde0db991f5ebe235d24c95bbaa672a63b5d695"}, + {file = "cmarkgfm-2024.1.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e601ed6c9a44c5f86b392afef2f66711da07924825a4ff695eec9a3cfc905732"}, + {file = "cmarkgfm-2024.1.14-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f8205bed32be4226f8e344d4e4be001a57929059c3303107485dacc145df5a8"}, + {file = "cmarkgfm-2024.1.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d14d928ad46974098979358ca456a056b8df91d5e6fa6e9294bd1173dfdfa10"}, + {file = "cmarkgfm-2024.1.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56d68fcf5e71879c327f0889b1127ce58e85e1ac44a624f24933430d726381f3"}, + {file = "cmarkgfm-2024.1.14-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:57545dfbd9f26fd3b07909d1bac3dacba4cb493fe872da2f4237b4c3597d7a3a"}, + {file = "cmarkgfm-2024.1.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:17b61950e32b6f1d6f93a28beaa5870a2b30e44ce846529ff301c986b817f036"}, + {file = "cmarkgfm-2024.1.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f223e606b57eebfc81e95839cf22cbe5eaec0da9ecd42326cc46568e0029188c"}, + {file = "cmarkgfm-2024.1.14-cp37-cp37m-win32.whl", hash = "sha256:ff0bc7dbb86d1a6877b771ed715dbe0ab071872e8c6f5beb782528b70ac7eedc"}, + {file = "cmarkgfm-2024.1.14-cp37-cp37m-win_amd64.whl", 
hash = "sha256:a4b20a59dc14a074bae0bd04306e504512c1883b0c9b6e0e0c5b217797571363"}, + {file = "cmarkgfm-2024.1.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4808af8c8c2c0a39dca8cc27406416d8254aec635f409dc5beb5cb5ed3af564a"}, + {file = "cmarkgfm-2024.1.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c71d6b44ffdd9a03e246a35c8a2b2454eb2a319fcfe5736ff62660259f9f4683"}, + {file = "cmarkgfm-2024.1.14-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7973812317ef672c5a1c59065294c4b280aaffb7fe1071b8c661de82021d4f1b"}, + {file = "cmarkgfm-2024.1.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:966bd27336736b2950551348b1bfc71863ea26b76b245e9966ee80cc9cde04ee"}, + {file = "cmarkgfm-2024.1.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02ba52518469ce85448e99beb20b1808c51f5a6cfee12554e5e810bfe774b791"}, + {file = "cmarkgfm-2024.1.14-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9659715d79cbc5c030d36709fe443a47b5c6dbb77bd05e5531feedbe5ad00ac5"}, + {file = "cmarkgfm-2024.1.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ff20857ccded325e11323540d0506fc1bd9ceea9746f81f08fe3d7e52f0a6bc"}, + {file = "cmarkgfm-2024.1.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7eff0e618704c49ab3f77680a0722aced6c99d2fe590b8f8d655fa29ed17270e"}, + {file = "cmarkgfm-2024.1.14-cp38-cp38-win32.whl", hash = "sha256:93364f7ec9de71285f0a27552f9cfa30aa4d311d37c820daa65dc27ab211a746"}, + {file = "cmarkgfm-2024.1.14-cp38-cp38-win_amd64.whl", hash = "sha256:ca9e5388c88f907c9ef1cf588947ea00a1c60f3462fe1f213b591bbd27ceb8c1"}, + {file = "cmarkgfm-2024.1.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1444f396fc18115065c66bb6f8a523167fa3bd153423d6fea272c4b486b473cf"}, + {file = "cmarkgfm-2024.1.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fdecbdad66b7738c711db33471d510c6279a01196920c43294d8071e51192807"}, + {file = 
"cmarkgfm-2024.1.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29b95d267ee1c61fd41d1f8634052a77b554517b18e8ffcdf0181e5e93a7af88"}, + {file = "cmarkgfm-2024.1.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56ec68a68164bf2ac0fd448d7fcb90d360a42072a6abd62be3e0d222240b7304"}, + {file = "cmarkgfm-2024.1.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:491d72e0dcc21ff9f3c9a905f785d4268170964500db498dc5b0f2f390fb340b"}, + {file = "cmarkgfm-2024.1.14-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0f7845837b0c4dbb3a1a7bbfffed264f011f86ea1cd0ce7823b533bcce0e1fe3"}, + {file = "cmarkgfm-2024.1.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0c66d0fecfdee9f2ffe34f640e346c653a951bddba79d192629c60958933aa51"}, + {file = "cmarkgfm-2024.1.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ec6eb4929bd231d7e5c0cf1154581be0a13b8d2b7aee2c30410066f363c765b9"}, + {file = "cmarkgfm-2024.1.14-cp39-cp39-win32.whl", hash = "sha256:d55795968751e1a8ddd2172c1d03d0107b9dd20445dbcd23dc9e0a80d95a0c5b"}, + {file = "cmarkgfm-2024.1.14-cp39-cp39-win_amd64.whl", hash = "sha256:b79662ab458c910c9785abacb8315d6f46487659d44398bd894f577bb6b9d04e"}, + {file = "cmarkgfm-2024.1.14.tar.gz", hash = "sha256:a208c1726e12ba385125cef2c6d375c41c5dea4cc2673a77af712b1dbf074e90"}, ] [package.dependencies] @@ -498,63 +525,63 @@ unixsocket = ["requests-unixsocket (>=0.1.4,<=1.0.0)"] [[package]] name = "coverage" -version = "7.4.0" +version = "7.4.4" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = 
"coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = 
"coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", 
hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = 
"coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = 
"coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = 
"sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, ] [package.dependencies] @@ -565,47 +592,56 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.7" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = 
"sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = 
"sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = 
"sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = 
"cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -621,13 +657,13 @@ files = [ [[package]] name = "docutils" -version = "0.20.1" +version = "0.21.1" description = "Docutils -- Python Documentation 
Utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, - {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, + {file = "docutils-0.21.1-py3-none-any.whl", hash = "sha256:14c8d34a55b46c88f9f714adb29cefbdd69fb82f3fef825e59c5faab935390d8"}, + {file = "docutils-0.21.1.tar.gz", hash = "sha256:65249d8a5345bc95e0f40f280ba63c98eb24de35c6c8f5b662e3e8948adea83f"}, ] [[package]] @@ -646,13 +682,13 @@ test = ["pytest (>=6)"] [[package]] name = "execnet" -version = "2.0.2" +version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, - {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, + {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, + {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, ] [package.extras] @@ -660,35 +696,35 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "filelock" -version = "3.13.1" +version = "3.13.4" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, + {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, + {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] name = "flake8" -version = "6.1.0" +version = "7.0.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, - {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, + {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, + {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.11.0,<2.12.0" -pyflakes = ">=3.1.0,<3.2.0" +pyflakes = ">=3.2.0,<3.3.0" [[package]] name = "frozenlist" @@ -776,13 
+812,13 @@ tests = ["freezegun", "pytest", "pytest-cov"] [[package]] name = "identify" -version = "2.5.33" +version = "2.5.35" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, - {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, + {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, + {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, ] [package.extras] @@ -790,33 +826,33 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] name = "importlib-metadata" -version = "7.0.1" +version = "7.1.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = 
"sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, + {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -831,13 +867,13 @@ files = [ [[package]] name = "jaraco-classes" -version = "3.3.0" +version = "3.4.0" description = "Utility functions for Python class constructs" optional = false python-versions = ">=3.8" files = [ - {file = "jaraco.classes-3.3.0-py3-none-any.whl", hash = "sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb"}, - {file = "jaraco.classes-3.3.0.tar.gz", hash = "sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621"}, + {file = "jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790"}, + {file = "jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd"}, ] [package.dependencies] @@ -845,7 +881,43 @@ more-itertools = "*" [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", 
"rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "jaraco-context" +version = "5.3.0" +description = "Useful decorators and context managers" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jaraco.context-5.3.0-py3-none-any.whl", hash = "sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266"}, + {file = "jaraco.context-5.3.0.tar.gz", hash = "sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2"}, +] + +[package.dependencies] +"backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["portend", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "jaraco-functools" +version = "4.0.0" +description = "Functools like those found in stdlib" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jaraco.functools-4.0.0-py3-none-any.whl", hash = "sha256:daf276ddf234bea897ef14f43c4e1bf9eefeac7b7a82a4dd69228ac20acff68d"}, + {file = "jaraco.functools-4.0.0.tar.gz", hash = "sha256:c279cb24c93d694ef7270f970d499cab4d3813f4e08273f95398651a634f0925"}, +] + +[package.dependencies] +more-itertools = "*" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.classes", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy 
(>=0.9.1)", "pytest-ruff"] [[package]] name = "jeepney" @@ -864,13 +936,13 @@ trio = ["async_generator", "trio"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -881,26 +953,28 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "keyring" -version = "24.3.0" +version = "25.1.0" description = "Store and access your passwords safely." optional = false python-versions = ">=3.8" files = [ - {file = "keyring-24.3.0-py3-none-any.whl", hash = "sha256:4446d35d636e6a10b8bce7caa66913dd9eca5fd222ca03a3d42c38608ac30836"}, - {file = "keyring-24.3.0.tar.gz", hash = "sha256:e730ecffd309658a08ee82535a3b5ec4b4c8669a9be11efb66249d8e0aeb9a25"}, + {file = "keyring-25.1.0-py3-none-any.whl", hash = "sha256:26fc12e6a329d61d24aa47b22a7c5c3f35753df7d8f2860973cf94f4e1fb3427"}, + {file = "keyring-25.1.0.tar.gz", hash = "sha256:7230ea690525133f6ad536a9b5def74a4bd52642abe594761028fc044d7c7893"}, ] [package.dependencies] importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} "jaraco.classes" = "*" +"jaraco.context" = "*" +"jaraco.functools" = "*" jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} [package.extras] completion = ["shtab (>=1.1.0)"] -docs = ["furo", 
"jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [[package]] name = "markdown-it-py" @@ -928,61 +1002,71 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = 
"MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = 
"MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = 
"sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -1009,75 +1093,91 @@ files = [ [[package]] name = "mmh3" -version = "4.0.1" +version = "4.1.0" description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." optional = false python-versions = "*" files = [ - {file = "mmh3-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b719ba87232749095011d567a36a25e40ed029fc61c47e74a12416d8bb60b311"}, - {file = "mmh3-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f0ad423711c5096cf4a346011f3b3ec763208e4f4cc4b10ed41cad2a03dbfaed"}, - {file = "mmh3-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80918e3f8ab6b717af0a388c14ffac5a89c15d827ff008c1ef545b8b32724116"}, - {file = "mmh3-4.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8222cd5f147defa1355b4042d590c34cef9b2bb173a159fcb72cda204061a4ac"}, - {file = "mmh3-4.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3821bcd1961ef19247c78c5d01b5a759de82ab0c023e2ff1d5ceed74322fa018"}, - {file = "mmh3-4.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59f7ed28c24249a54665f1ed3f6c7c1c56618473381080f79bcc0bd1d1db2e4a"}, - {file = "mmh3-4.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dacd8d07d4b9be8f0cb6e8fd9a08fc237c18578cf8d42370ee8af2f5a2bf1967"}, - {file = "mmh3-4.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd00883ef6bcf7831026ce42e773a4b2a4f3a7bf9003a4e781fecb1144b06c1"}, - {file = "mmh3-4.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:df73d1c7f0c50c0f8061cd349968fd9dcc6a9e7592d1c834fa898f9c98f8dd7e"}, - {file = "mmh3-4.0.1-cp310-cp310-musllinux_1_1_i686.whl", 
hash = "sha256:f41eeae98f15af0a4ba2a92bce11d8505b612012af664a7634bbfdba7096f5fc"}, - {file = "mmh3-4.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ce9bb622e9f1162cafd033071b32ac495c5e8d5863fca2a5144c092a0f129a5b"}, - {file = "mmh3-4.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dd92e0ff9edee6af960d9862a3e519d651e6344321fd280fb082654fc96ecc4d"}, - {file = "mmh3-4.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aefa8ac8c8fc8ad93365477baef2125dbfd7235880a9c47dca2c46a0af49ef7"}, - {file = "mmh3-4.0.1-cp310-cp310-win32.whl", hash = "sha256:a076ea30ec279a63f44f4c203e4547b5710d00581165fed12583d2017139468d"}, - {file = "mmh3-4.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5aa1e87e448ee1ffa3737b72f2fe3f5960159ab75bbac2f49dca6fb9797132f6"}, - {file = "mmh3-4.0.1-cp310-cp310-win_arm64.whl", hash = "sha256:45155ff2f291c3a1503d1c93e539ab025a13fd8b3f2868650140702b8bd7bfc2"}, - {file = "mmh3-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:91f81d6dd4d0c3b4235b4a58a545493c946669c751a2e0f15084171dc2d81fee"}, - {file = "mmh3-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bbfddaf55207798f5b29341e5b3a24dbff91711c51b1665eabc9d910255a78f0"}, - {file = "mmh3-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0deb8e19121c0896fdc709209aceda30a367cda47f4a884fcbe56223dbf9e867"}, - {file = "mmh3-4.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df468ac7b61ec7251d7499e27102899ca39d87686f659baf47f84323f8f4541f"}, - {file = "mmh3-4.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84936c113814c6ef3bc4bd3d54f538d7ba312d1d0c2441ac35fdd7d5221c60f6"}, - {file = "mmh3-4.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b1df3cf5ce5786aa093f45462118d87ff485f0d69699cdc34f6289b1e833632"}, - {file = "mmh3-4.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:da281aa740aa9e7f9bebb879c1de0ea9366687ece5930f9f5027e7c87d018153"}, - {file = "mmh3-4.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ec380933a56eb9fea16d7fcd49f1b5a5c92d7d2b86f25e9a845b72758ee8c42"}, - {file = "mmh3-4.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2fa905fcec8a30e1c0ef522afae1d6170c4f08e6a88010a582f67c59209fb7c7"}, - {file = "mmh3-4.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9b23a06315a65ef0b78da0be32409cfce0d6d83e51d70dcebd3302a61e4d34ce"}, - {file = "mmh3-4.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:36c27089b12026db14be594d750f7ea6d5d785713b40a971b063f033f5354a74"}, - {file = "mmh3-4.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6338341ae6fa5eaa46f69ed9ac3e34e8eecad187b211a6e552e0d8128c568eb1"}, - {file = "mmh3-4.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1aece29e27d0c8fb489d00bb712fba18b4dd10e39c9aec2e216c779ae6400b8f"}, - {file = "mmh3-4.0.1-cp311-cp311-win32.whl", hash = "sha256:2733e2160c142eed359e25e5529915964a693f0d043165b53933f904a731c1b3"}, - {file = "mmh3-4.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:09f9f643e0b7f8d98473efdfcdb155105824a38a1ada374625b84c1208197a9b"}, - {file = "mmh3-4.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:d93422f38bc9c4d808c5438a011b769935a87df92ce277e9e22b6ec0ae8ed2e2"}, - {file = "mmh3-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:41013c033dc446d3bfb573621b8b53223adcfcf07be1da0bcbe166d930276882"}, - {file = "mmh3-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be46540eac024dd8d9b82899d35b2f23592d3d3850845aba6f10e6127d93246b"}, - {file = "mmh3-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0e64114b30c6c1e30f8201433b5fa6108a74a5d6f1a14af1b041360c0dd056aa"}, - {file = "mmh3-4.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:275637ecca755565e3b0505d3ecf8e1e0a51eb6a3cbe6e212ed40943f92f98cd"}, - {file = 
"mmh3-4.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:955178c8e8d3bc9ad18eab443af670cd13fe18a6b2dba16db2a2a0632be8a133"}, - {file = "mmh3-4.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:750afe0477e0c17904611045ad311ff10bc6c2ec5f5ddc5dd949a2b9bf71d5d5"}, - {file = "mmh3-4.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b7c18c35e9d6a59d6c5f94a6576f800ff2b500e41cd152ecfc7bb4330f32ba2"}, - {file = "mmh3-4.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b8635b1fc6b25d93458472c5d682a1a4b9e6c53e7f4ca75d2bf2a18fa9363ae"}, - {file = "mmh3-4.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:057b8de47adee8ad0f2e194ffa445b9845263c1c367ddb335e9ae19c011b25cc"}, - {file = "mmh3-4.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:78c0ee0197cfc912f57172aa16e784ad55b533e2e2e91b3a65188cc66fbb1b6e"}, - {file = "mmh3-4.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:d6acb15137467592691e41e6f897db1d2823ff3283111e316aa931ac0b5a5709"}, - {file = "mmh3-4.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:f91b2598e1f25e013da070ff641a29ebda76292d3a7bdd20ef1736e9baf0de67"}, - {file = "mmh3-4.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a78f6f2592395321e2f0dc6b618773398b2c9b15becb419364e0960df53e9f04"}, - {file = "mmh3-4.0.1-cp38-cp38-win32.whl", hash = "sha256:d8650982d0b70af24700bd32b15fab33bb3ef9be4af411100f4960a938b0dd0f"}, - {file = "mmh3-4.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:2489949c7261870a02eeaa2ec7b966881c1775df847c8ce6ea4de3e9d96b5f4f"}, - {file = "mmh3-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:dcd03a4bb0fa3db03648d26fb221768862f089b6aec5272f0df782a8b4fe5b5b"}, - {file = "mmh3-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3775fb0cc675977e5b506b12b8f23cd220be3d4c2d4db7df81f03c9f61baa4cc"}, - {file = 
"mmh3-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f250f78328d41cdf73d3ad9809359636f4fb7a846d7a6586e1a0f0d2f5f2590"}, - {file = "mmh3-4.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4161009c9077d5ebf8b472dbf0f41b9139b3d380e0bbe71bf9b503efb2965584"}, - {file = "mmh3-4.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2cf986ebf530717fefeee8d0decbf3f359812caebba985e2c8885c0ce7c2ee4e"}, - {file = "mmh3-4.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b55741ed51e928b1eec94a119e003fa3bc0139f4f9802e19bea3af03f7dd55a"}, - {file = "mmh3-4.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8250375641b8c5ce5d56a00c6bb29f583516389b8bde0023181d5eba8aa4119"}, - {file = "mmh3-4.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29373e802bc094ffd490e39047bac372ac893c0f411dac3223ef11775e34acd0"}, - {file = "mmh3-4.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:071ba41e56f5c385d13ee84b288ccaf46b70cd9e9a6d8cbcbe0964dee68c0019"}, - {file = "mmh3-4.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:909e0b88d2c6285481fa6895c2a0faf6384e1b0093f72791aa57d1e04f4adc65"}, - {file = "mmh3-4.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:51d356f4380f9d9c2a0612156c3d1e7359933991e84a19304440aa04fd723e68"}, - {file = "mmh3-4.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c4b2549949efa63d8decb6572f7e75fad4f2375d52fafced674323239dd9812d"}, - {file = "mmh3-4.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9bcc7b32a89c4e5c6fdef97d82e8087ba26a20c25b4aaf0723abd0b302525934"}, - {file = "mmh3-4.0.1-cp39-cp39-win32.whl", hash = "sha256:8edee21ae4f4337fb970810ef5a263e5d2212b85daca0d39daf995e13380e908"}, - {file = "mmh3-4.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:8cbb6f90f08952fcc90dbf08f0310fdf4d61096c5cb7db8adf03e23f3b857ae5"}, - {file = 
"mmh3-4.0.1-cp39-cp39-win_arm64.whl", hash = "sha256:ce71856cbca9d7c74d084eeee1bc5b126ed197c1c9530a4fdb994d099b9bc4db"}, - {file = "mmh3-4.0.1.tar.gz", hash = "sha256:ad8be695dc4e44a79631748ba5562d803f0ac42d36a6b97a53aca84a70809385"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be5ac76a8b0cd8095784e51e4c1c9c318c19edcd1709a06eb14979c8d850c31a"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98a49121afdfab67cd80e912b36404139d7deceb6773a83620137aaa0da5714c"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5259ac0535874366e7d1a5423ef746e0d36a9e3c14509ce6511614bdc5a7ef5b"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5950827ca0453a2be357696da509ab39646044e3fa15cad364eb65d78797437"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dd0f652ae99585b9dd26de458e5f08571522f0402155809fd1dc8852a613a39"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d25548070942fab1e4a6f04d1626d67e66d0b81ed6571ecfca511f3edf07e6"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53db8d9bad3cb66c8f35cbc894f336273f63489ce4ac416634932e3cbe79eb5b"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75da0f615eb55295a437264cc0b736753f830b09d102aa4c2a7d719bc445ec05"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b926b07fd678ea84b3a2afc1fa22ce50aeb627839c44382f3d0291e945621e1a"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5b053334f9b0af8559d6da9dc72cef0a65b325ebb3e630c680012323c950bb6"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bf33dc43cd6de2cb86e0aa73a1cc6530f557854bbbe5d59f41ef6de2e353d7b"}, + 
{file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fa7eacd2b830727ba3dd65a365bed8a5c992ecd0c8348cf39a05cc77d22f4970"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42dfd6742b9e3eec599f85270617debfa0bbb913c545bb980c8a4fa7b2d047da"}, + {file = "mmh3-4.1.0-cp310-cp310-win32.whl", hash = "sha256:2974ad343f0d39dcc88e93ee6afa96cedc35a9883bc067febd7ff736e207fa47"}, + {file = "mmh3-4.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:74699a8984ded645c1a24d6078351a056f5a5f1fe5838870412a68ac5e28d865"}, + {file = "mmh3-4.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f0dc874cedc23d46fc488a987faa6ad08ffa79e44fb08e3cd4d4cf2877c00a00"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb"}, + {file = "mmh3-4.1.0-cp311-cp311-win32.whl", hash = "sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f"}, + {file = "mmh3-4.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec"}, + {file = "mmh3-4.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0033d60c7939168ef65ddc396611077a7268bde024f2c23bdc283a19123f9e9c"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d82eb4defa245e02bb0b0dc4f1e7ee284f8d212633389c91f7fba99ba993f0a2"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb04e2feeabaad6231e89cd43b3d01a4403579aa792c9ab6fdeef45cc58d4ec0"}, + {file = 
"mmh3-4.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3b1a27def545ce11e36158ba5d5390cdbc300cfe456a942cc89d649cf7e3b2"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce0ab79ff736d7044e5e9b3bfe73958a55f79a4ae672e6213e92492ad5e734d5"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:deb887f5fcdaf57cf646b1e062d56b06ef2f23421c80885fce18b37143cba828"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99dd564e9e2b512eb117bd0cbf0f79a50c45d961c2a02402787d581cec5448d5"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:08373082dfaa38fe97aa78753d1efd21a1969e51079056ff552e687764eafdfe"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:54b9c6a2ea571b714e4fe28d3e4e2db37abfd03c787a58074ea21ee9a8fd1740"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a7b1edf24c69e3513f879722b97ca85e52f9032f24a52284746877f6a7304086"}, + {file = "mmh3-4.1.0-cp312-cp312-win32.whl", hash = "sha256:411da64b951f635e1e2284b71d81a5a83580cea24994b328f8910d40bed67276"}, + {file = "mmh3-4.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9"}, + {file = "mmh3-4.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:168473dd608ade6a8d2ba069600b35199a9af837d96177d3088ca91f2b3798e3"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:372f4b7e1dcde175507640679a2a8790185bb71f3640fc28a4690f73da986a3b"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:438584b97f6fe13e944faf590c90fc127682b57ae969f73334040d9fa1c7ffa5"}, + {file = 
"mmh3-4.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6e27931b232fc676675fac8641c6ec6b596daa64d82170e8597f5a5b8bdcd3b6"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:571a92bad859d7b0330e47cfd1850b76c39b615a8d8e7aa5853c1f971fd0c4b1"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a69d6afe3190fa08f9e3a58e5145549f71f1f3fff27bd0800313426929c7068"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afb127be0be946b7630220908dbea0cee0d9d3c583fa9114a07156f98566dc28"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:940d86522f36348ef1a494cbf7248ab3f4a1638b84b59e6c9e90408bd11ad729"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dcccc4935686619a8e3d1f7b6e97e3bd89a4a796247930ee97d35ea1a39341"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01bb9b90d61854dfc2407c5e5192bfb47222d74f29d140cb2dd2a69f2353f7cc"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bcb1b8b951a2c0b0fb8a5426c62a22557e2ffc52539e0a7cc46eb667b5d606a9"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6477a05d5e5ab3168e82e8b106e316210ac954134f46ec529356607900aea82a"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:da5892287e5bea6977364b15712a2573c16d134bc5fdcdd4cf460006cf849278"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:99180d7fd2327a6fffbaff270f760576839dc6ee66d045fa3a450f3490fda7f5"}, + {file = "mmh3-4.1.0-cp38-cp38-win32.whl", hash = "sha256:9b0d4f3949913a9f9a8fb1bb4cc6ecd52879730aab5ff8c5a3d8f5b593594b73"}, + {file = "mmh3-4.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:598c352da1d945108aee0c3c3cfdd0e9b3edef74108f53b49d481d3990402169"}, + {file = 
"mmh3-4.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:475d6d1445dd080f18f0f766277e1237fa2914e5fe3307a3b2a3044f30892103"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5ca07c41e6a2880991431ac717c2a049056fff497651a76e26fc22224e8b5732"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ebe052fef4bbe30c0548d12ee46d09f1b69035ca5208a7075e55adfe091be44"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaefd42e85afb70f2b855a011f7b4d8a3c7e19c3f2681fa13118e4d8627378c5"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0ae43caae5a47afe1b63a1ae3f0986dde54b5fb2d6c29786adbfb8edc9edfb"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6218666f74c8c013c221e7f5f8a693ac9cf68e5ac9a03f2373b32d77c48904de"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac59294a536ba447b5037f62d8367d7d93b696f80671c2c45645fa9f1109413c"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086844830fcd1e5c84fec7017ea1ee8491487cfc877847d96f86f68881569d2e"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e42b38fad664f56f77f6fbca22d08450f2464baa68acdbf24841bf900eb98e87"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d08b790a63a9a1cde3b5d7d733ed97d4eb884bfbc92f075a091652d6bfd7709a"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:73ea4cc55e8aea28c86799ecacebca09e5f86500414870a8abaedfcbaf74d288"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f90938ff137130e47bcec8dc1f4ceb02f10178c766e2ef58a9f657ff1f62d124"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:aa1f13e94b8631c8cd53259250556edcf1de71738936b60febba95750d9632bd"}, + {file = "mmh3-4.1.0-cp39-cp39-win32.whl", hash = "sha256:a3b680b471c181490cf82da2142029edb4298e1bdfcb67c76922dedef789868d"}, + {file = "mmh3-4.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fefef92e9c544a8dbc08f77a8d1b6d48006a750c4375bbcd5ff8199d761e263b"}, + {file = "mmh3-4.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:8e2c1f6a2b41723a4f82bd5a762a777836d29d664fc0095f17910bea0adfd4a6"}, + {file = "mmh3-4.1.0.tar.gz", hash = "sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a"}, ] [package.extras] @@ -1085,121 +1185,137 @@ test = ["mypy (>=1.0)", "pytest (>=7.0.0)"] [[package]] name = "more-itertools" -version = "10.1.0" +version = "10.2.0" description = "More routines for operating on iterables, beyond itertools" optional = false python-versions = ">=3.8" files = [ - {file = "more-itertools-10.1.0.tar.gz", hash = "sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a"}, - {file = "more_itertools-10.1.0-py3-none-any.whl", hash = "sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6"}, + {file = "more-itertools-10.2.0.tar.gz", hash = "sha256:8fccb480c43d3e99a00087634c06dd02b0d50fbf088b380de5a41a015ec239e1"}, + {file = "more_itertools-10.2.0-py3-none-any.whl", hash = "sha256:686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684"}, ] [[package]] name = "multidict" -version = "6.0.4" +version = "6.0.5" description = "multidict implementation" optional = false python-versions = ">=3.7" files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = 
"multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = 
"multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = 
"multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = 
"multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = 
"multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = 
"multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, ] [[package]] name = "nh3" -version = "0.2.15" +version = "0.2.17" description = "Python bindings to the ammonia HTML sanitization library." 
optional = false python-versions = "*" files = [ - {file = "nh3-0.2.15-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:9c0d415f6b7f2338f93035bba5c0d8c1b464e538bfbb1d598acd47d7969284f0"}, - {file = "nh3-0.2.15-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6f42f99f0cf6312e470b6c09e04da31f9abaadcd3eb591d7d1a88ea931dca7f3"}, - {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac19c0d68cd42ecd7ead91a3a032fdfff23d29302dbb1311e641a130dfefba97"}, - {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0d77272ce6d34db6c87b4f894f037d55183d9518f948bba236fe81e2bb4e28"}, - {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8d595df02413aa38586c24811237e95937ef18304e108b7e92c890a06793e3bf"}, - {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86e447a63ca0b16318deb62498db4f76fc60699ce0a1231262880b38b6cff911"}, - {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3277481293b868b2715907310c7be0f1b9d10491d5adf9fce11756a97e97eddf"}, - {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60684857cfa8fdbb74daa867e5cad3f0c9789415aba660614fe16cd66cbb9ec7"}, - {file = "nh3-0.2.15-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3b803a5875e7234907f7d64777dfde2b93db992376f3d6d7af7f3bc347deb305"}, - {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0d02d0ff79dfd8208ed25a39c12cbda092388fff7f1662466e27d97ad011b770"}, - {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:f3b53ba93bb7725acab1e030bc2ecd012a817040fd7851b332f86e2f9bb98dc6"}, - {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:b1e97221cedaf15a54f5243f2c5894bb12ca951ae4ddfd02a9d4ea9df9e1a29d"}, - {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_x86_64.whl", 
hash = "sha256:a5167a6403d19c515217b6bcaaa9be420974a6ac30e0da9e84d4fc67a5d474c5"}, - {file = "nh3-0.2.15-cp37-abi3-win32.whl", hash = "sha256:427fecbb1031db085eaac9931362adf4a796428ef0163070c484b5a768e71601"}, - {file = "nh3-0.2.15-cp37-abi3-win_amd64.whl", hash = "sha256:bc2d086fb540d0fa52ce35afaded4ea526b8fc4d3339f783db55c95de40ef02e"}, - {file = "nh3-0.2.15.tar.gz", hash = "sha256:d1e30ff2d8d58fb2a14961f7aac1bbb1c51f9bdd7da727be35c63826060b0bf3"}, + {file = "nh3-0.2.17-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9"}, + {file = "nh3-0.2.17-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b"}, + {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_aarch64.whl", hash = 
"sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a"}, + {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062"}, + {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71"}, + {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10"}, + {file = "nh3-0.2.17-cp37-abi3-win32.whl", hash = "sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911"}, + {file = "nh3-0.2.17-cp37-abi3-win_amd64.whl", hash = "sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb"}, + {file = "nh3-0.2.17.tar.gz", hash = "sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028"}, ] [[package]] @@ -1218,53 +1334,53 @@ setuptools = "*" [[package]] name = "packaging" -version = "23.2" +version = "23.1" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] [[package]] name = "pkginfo" -version = "1.9.6" +version = "1.10.0" description = "Query metadata from sdists / bdists / installed packages." 
optional = false python-versions = ">=3.6" files = [ - {file = "pkginfo-1.9.6-py3-none-any.whl", hash = "sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546"}, - {file = "pkginfo-1.9.6.tar.gz", hash = "sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046"}, + {file = "pkginfo-1.10.0-py3-none-any.whl", hash = "sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097"}, + {file = "pkginfo-1.10.0.tar.gz", hash = "sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297"}, ] [package.extras] -testing = ["pytest", "pytest-cov"] +testing = ["pytest", "pytest-cov", "wheel"] [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" 
optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -1273,13 +1389,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.6.0" +version = "3.7.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.9" files = [ - {file = "pre_commit-3.6.0-py2.py3-none-any.whl", hash = "sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376"}, - {file = "pre_commit-3.6.0.tar.gz", hash = "sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"}, + {file = "pre_commit-3.7.0-py2.py3-none-any.whl", hash = "sha256:5eae9e10c2b5ac51577c3452ec0a490455c45a0533f7960f993a0d01e59decab"}, + {file = "pre_commit-3.7.0.tar.gz", hash = "sha256:e209d61b8acdcf742404408531f0c37d49d2c734fd7cff2d6076083d191cb060"}, ] [package.dependencies] @@ -1302,24 +1418,24 @@ files = [ [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + 
{file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] name = "pyflakes" -version = "3.1.0" +version = "3.2.0" description = "passive checker of Python programs" optional = false python-versions = ">=3.8" files = [ - {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, - {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] [[package]] @@ -1394,17 +1510,17 @@ testing = ["coverage (==6.2)", "mypy (==0.931)"] [[package]] name = "pytest-asyncio" -version = "0.23.3" +version = "0.23.6" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-asyncio-0.23.3.tar.gz", hash = "sha256:af313ce900a62fbe2b1aed18e37ad757f1ef9940c6b6a88e2954de38d6b1fb9f"}, - {file = "pytest_asyncio-0.23.3-py3-none-any.whl", hash = "sha256:37a9d912e8338ee7b4a3e917381d1c95bfc8682048cb0fbc35baba316ec1faba"}, + {file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"}, + {file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"}, ] [package.dependencies] -pytest = ">=7.0.0" +pytest = ">=7.0.0,<9" [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] @@ -1459,17 +1575,17 @@ pytest = ">=7.0" [[package]] name = "pytest-timeout" -version = "2.2.0" +version = "2.3.1" description = "pytest plugin to abort hanging tests" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-timeout-2.2.0.tar.gz", hash = 
"sha256:3b0b95dabf3cb50bac9ef5ca912fa0cfc286526af17afc806824df20c2f72c90"}, - {file = "pytest_timeout-2.2.0-py3-none-any.whl", hash = "sha256:bde531e096466f49398a59f2dde76fa78429a09a12411466f88a07213e220de2"}, + {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, + {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, ] [package.dependencies] -pytest = ">=5.0.0" +pytest = ">=7.0.0" [[package]] name = "python-lzo" @@ -1511,6 +1627,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1518,8 +1635,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, 
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1536,6 +1661,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1543,6 +1669,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = 
"sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1550,13 +1677,13 @@ files = [ [[package]] name = "readme-renderer" -version = "42.0" +version = "43.0" description = "readme_renderer is a library for rendering readme descriptions for Warehouse" optional = false python-versions = ">=3.8" files = [ - {file = "readme_renderer-42.0-py3-none-any.whl", hash = "sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d"}, - {file = "readme_renderer-42.0.tar.gz", hash = "sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1"}, + {file = "readme_renderer-43.0-py3-none-any.whl", hash = "sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9"}, + {file = "readme_renderer-43.0.tar.gz", hash = "sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311"}, ] [package.dependencies] @@ -1619,13 +1746,13 @@ idna2008 = ["idna"] [[package]] name = "rich" -version = "13.7.0" +version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, - {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, ] [package.dependencies] @@ -1675,29 +1802,29 @@ jeepney = ">=0.6" [[package]] name = "setuptools" -version = "69.0.3" +version 
= "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", 
"pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "shortuuid" -version = "1.0.11" +version = "1.0.13" description = "A generator library for concise, unambiguous and URL-safe UUIDs." optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "shortuuid-1.0.11-py3-none-any.whl", hash = "sha256:27ea8f28b1bd0bf8f15057a3ece57275d2059d2b0bb02854f02189962c13b6aa"}, - {file = "shortuuid-1.0.11.tar.gz", hash = "sha256:fc75f2615914815a8e4cb1501b3a513745cb66ef0fd5fc6fb9f8c3fa3481f789"}, + {file = "shortuuid-1.0.13-py3-none-any.whl", hash = "sha256:a482a497300b49b4953e15108a7913244e1bb0d41f9d332f5e9925dba33a3c5a"}, + {file = "shortuuid-1.0.13.tar.gz", hash = "sha256:3bb9cf07f606260584b1df46399c0b87dd84773e7b25912b7e391e30797c5e72"}, ] [[package]] @@ -1730,13 +1857,13 @@ files = [ [[package]] name = "twine" -version = "4.0.2" +version = "5.0.0" description = "Collection of utilities for publishing packages on PyPI" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "twine-4.0.2-py3-none-any.whl", hash = "sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8"}, - {file = "twine-4.0.2.tar.gz", hash = "sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8"}, + {file = "twine-5.0.0-py3-none-any.whl", hash = "sha256:a262933de0b484c53408f9edae2e7821c1c45a3314ff2df9bdd343aa7ab8edc0"}, + {file = "twine-5.0.0.tar.gz", hash = "sha256:89b0cc7d370a4b66421cc6102f269aa910fe0f1861c124f573cf2ddedbc10cf4"}, ] [package.dependencies] @@ -1752,13 +1879,13 @@ urllib3 = ">=1.26.0" [[package]] name = "tzdata" -version = "2023.4" 
+version = "2024.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, - {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] @@ -1780,17 +1907,18 @@ devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3) [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -1903,19 +2031,18 @@ multidict = ">=4.0" [[package]] name = "zest-releaser" -version = "9.1.1" +version = "9.1.3" description = "Software releasing made easy and repeatable" optional = false python-versions = ">=3.8" files = [ - {file = "zest.releaser-9.1.1-py3-none-any.whl", hash = "sha256:71f44163e9a13b3de6b09a568070f677c1d5fcc85e4f7ddb7627b51c7652efbb"}, - {file = 
"zest.releaser-9.1.1.tar.gz", hash = "sha256:4434c44597fdbdb890b768e5b61e1e13bad2155bf844e823219a831bf135345a"}, + {file = "zest.releaser-9.1.3.tar.gz", hash = "sha256:7ef1fb259a7f6b3e103d799f4d7092614c9daa1be491d28c08063353c1ad6001"}, ] [package.dependencies] build = ">=1.0.0" colorama = "*" -readme-renderer = {version = ">=40", extras = ["md"]} +readme_renderer = {version = ">=40", extras = ["md"]} requests = "*" setuptools = ">=61.0.0" tomli = {version = "*", markers = "python_version < \"3.11\""} @@ -1927,18 +2054,18 @@ test = ["wheel", "zope.testing", "zope.testrunner"] [[package]] name = "zipp" -version = "3.17.0" +version = "3.18.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", 
"pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0"