diff --git a/CHANGES.txt b/CHANGES.txt
index 3ea20690..8a41a275 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -38,6 +38,22 @@ Changelog
 
 - Fix crash for non-numeric Ceph version strings
 
+- Restore default value for missing entries on daemon reload
+
+- Crash on invalid config while reloading
+
+- Replace prettytable with rich
+
+- Replace telnet shell with HTTP API
+
+- Support backup job migration across servers
+
+- Add `tags {set, add, remove}` subcommand
+
+- Add `expire` subcommand
+
+- Migrate `backy check` to `backy client check` and use the new HTTP API
+
 2.4.3 (2019-04-17)
 ==================
 
diff --git a/README.txt b/README.txt
index a9026897..97d5808c 100644
--- a/README.txt
+++ b/README.txt
@@ -95,13 +95,6 @@ configurable.
 
 Features
 ========
 
-Telnet shell
-------------
-
-Telnet into localhost port 6023 to get an interactive console. The console can
-currently be used to inspect the scheduler's live status.
-
-
 Self-check
 ----------
 
diff --git a/doc/man-backy.rst b/doc/man-backy.rst
index ec71634f..203d62c6 100644
--- a/doc/man-backy.rst
+++ b/doc/man-backy.rst
@@ -241,24 +241,6 @@ environment variables like **CEPH_CLUSTER** or **CEPH_ARGS**.
 
 **backy scheduler** processes exit cleanly on SIGTERM.
 
-Telnet shell
-------------
-
-The schedules opens a telnet server (default: localhost port 6023) for live
-inspection. The telnet interface accepts the following commands:
-
-jobs [REGEX]
-    Prints an overview of all configured jobs together with their last and
-    next backup run. An optional (extended) regular expression restricts output
-    to matching job names.
-
-status
-    Dumps internal server status details.
-
-quit
-    Exits the telnet shell.
-
-
 Files
 -----
 
@@ -288,12 +270,12 @@ config
     status-interval
         Update status file every N seconds (default: 30).
 
-    telnet-addrs
-        Comma-separated list of listen addresses for the telnet server
+    api-addrs
+        Comma-separated list of listen addresses for the api server
         (default: 127.0.0.1, ::1).
 
-    telnet-port
-        Port number of the telnet server (default: 6023).
+    api-port
+        Port number of the api server (default: 6023).
 
 
 .. _schedules:
 
diff --git a/poetry.lock b/poetry.lock
index 5a404577..1f3295c3 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,10 +1,146 @@
-# This file is automatically @generated by Poetry and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand.
+ +[[package]] +name = "aiohttp" +version = "3.8.4" +description = "Async http client/server framework (asyncio)" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5ce45967538fb747370308d3145aa68a074bdecb4f3a300869590f725ced69c1"}, + {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b744c33b6f14ca26b7544e8d8aadff6b765a80ad6164fb1a430bbadd593dfb1a"}, + {file = "aiohttp-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a45865451439eb320784918617ba54b7a377e3501fb70402ab84d38c2cd891b"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a86d42d7cba1cec432d47ab13b6637bee393a10f664c425ea7b305d1301ca1a3"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee3c36df21b5714d49fc4580247947aa64bcbe2939d1b77b4c8dcb8f6c9faecc"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:176a64b24c0935869d5bbc4c96e82f89f643bcdf08ec947701b9dbb3c956b7dd"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c844fd628851c0bc309f3c801b3a3d58ce430b2ce5b359cd918a5a76d0b20cb5"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5393fb786a9e23e4799fec788e7e735de18052f83682ce2dfcabaf1c00c2c08e"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e4b09863aae0dc965c3ef36500d891a3ff495a2ea9ae9171e4519963c12ceefd"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:adfbc22e87365a6e564c804c58fc44ff7727deea782d175c33602737b7feadb6"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:147ae376f14b55f4f3c2b118b95be50a369b89b38a971e80a17c3fd623f280c9"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:eafb3e874816ebe2a92f5e155f17260034c8c341dad1df25672fb710627c6949"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6cc15d58053c76eacac5fa9152d7d84b8d67b3fde92709195cb984cfb3475ea"}, + {file = "aiohttp-3.8.4-cp310-cp310-win32.whl", hash = "sha256:59f029a5f6e2d679296db7bee982bb3d20c088e52a2977e3175faf31d6fb75d1"}, + {file = "aiohttp-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:fe7ba4a51f33ab275515f66b0a236bcde4fb5561498fe8f898d4e549b2e4509f"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d8ef1a630519a26d6760bc695842579cb09e373c5f227a21b67dc3eb16cfea4"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b3f2e06a512e94722886c0827bee9807c86a9f698fac6b3aee841fab49bbfb4"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a80464982d41b1fbfe3154e440ba4904b71c1a53e9cd584098cd41efdb188ef"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b631e26df63e52f7cce0cce6507b7a7f1bc9b0c501fcde69742130b32e8782f"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f43255086fe25e36fd5ed8f2ee47477408a73ef00e804cb2b5cba4bf2ac7f5e"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d347a172f866cd1d93126d9b239fcbe682acb39b48ee0873c73c933dd23bd0f"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a3fec6a4cb5551721cdd70473eb009d90935b4063acc5f40905d40ecfea23e05"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80a37fe8f7c1e6ce8f2d9c411676e4bc633a8462844e38f46156d07a7d401654"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d1e6a862b76f34395a985b3cd39a0d949ca80a70b6ebdea37d3ab39ceea6698a"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd468460eefef601ece4428d3cf4562459157c0f6523db89365202c31b6daebb"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:618c901dd3aad4ace71dfa0f5e82e88b46ef57e3239fc7027773cb6d4ed53531"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:652b1bff4f15f6287550b4670546a2947f2a4575b6c6dff7760eafb22eacbf0b"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80575ba9377c5171407a06d0196b2310b679dc752d02a1fcaa2bc20b235dbf24"}, + {file = "aiohttp-3.8.4-cp311-cp311-win32.whl", hash = "sha256:bbcf1a76cf6f6dacf2c7f4d2ebd411438c275faa1dc0c68e46eb84eebd05dd7d"}, + {file = "aiohttp-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:6e74dd54f7239fcffe07913ff8b964e28b712f09846e20de78676ce2a3dc0bfc"}, + {file = "aiohttp-3.8.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:880e15bb6dad90549b43f796b391cfffd7af373f4646784795e20d92606b7a51"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb96fa6b56bb536c42d6a4a87dfca570ff8e52de2d63cabebfd6fb67049c34b6"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a6cadebe132e90cefa77e45f2d2f1a4b2ce5c6b1bfc1656c1ddafcfe4ba8131"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f352b62b45dff37b55ddd7b9c0c8672c4dd2eb9c0f9c11d395075a84e2c40f75"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ab43061a0c81198d88f39aaf90dae9a7744620978f7ef3e3708339b8ed2ef01"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9cb1565a7ad52e096a6988e2ee0397f72fe056dadf75d17fa6b5aebaea05622"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1b3ea7edd2d24538959c1c1abf97c744d879d4e541d38305f9bd7d9b10c9ec41"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7c7837fe8037e96b6dd5cfcf47263c1620a9d332a87ec06a6ca4564e56bd0f36"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3b90467ebc3d9fa5b0f9b6489dfb2c304a1db7b9946fa92aa76a831b9d587e99"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:cab9401de3ea52b4b4c6971db5fb5c999bd4260898af972bf23de1c6b5dd9d71"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d1f9282c5f2b5e241034a009779e7b2a1aa045f667ff521e7948ea9b56e0c5ff"}, + {file = "aiohttp-3.8.4-cp36-cp36m-win32.whl", hash = "sha256:5e14f25765a578a0a634d5f0cd1e2c3f53964553a00347998dfdf96b8137f777"}, + {file = "aiohttp-3.8.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4c745b109057e7e5f1848c689ee4fb3a016c8d4d92da52b312f8a509f83aa05e"}, + {file = "aiohttp-3.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aede4df4eeb926c8fa70de46c340a1bc2c6079e1c40ccf7b0eae1313ffd33519"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4ddaae3f3d32fc2cb4c53fab020b69a05c8ab1f02e0e59665c6f7a0d3a5be54f"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4eb3b82ca349cf6fadcdc7abcc8b3a50ab74a62e9113ab7a8ebc268aad35bb9"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bcb89336efa095ea21b30f9e686763f2be4478f1b0a616969551982c4ee4c3b"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c08e8ed6fa3d477e501ec9db169bfac8140e830aa372d77e4a43084d8dd91ab"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6cd05ea06daca6ad6a4ca3ba7fe7dc5b5de063ff4daec6170ec0f9979f6c332"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7a00a9ed8d6e725b55ef98b1b35c88013245f35f68b1b12c5cd4100dddac333"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:de04b491d0e5007ee1b63a309956eaed959a49f5bb4e84b26c8f5d49de140fa9"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:40653609b3bf50611356e6b6554e3a331f6879fa7116f3959b20e3528783e699"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dbf3a08a06b3f433013c143ebd72c15cac33d2914b8ea4bea7ac2c23578815d6"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854f422ac44af92bfe172d8e73229c270dc09b96535e8a548f99c84f82dde241"}, + {file = "aiohttp-3.8.4-cp37-cp37m-win32.whl", hash = "sha256:aeb29c84bb53a84b1a81c6c09d24cf33bb8432cc5c39979021cc0f98c1292a1a"}, + {file = "aiohttp-3.8.4-cp37-cp37m-win_amd64.whl", hash = "sha256:db3fc6120bce9f446d13b1b834ea5b15341ca9ff3f335e4a951a6ead31105480"}, + {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fabb87dd8850ef0f7fe2b366d44b77d7e6fa2ea87861ab3844da99291e81e60f"}, + {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91f6d540163f90bbaef9387e65f18f73ffd7c79f5225ac3d3f61df7b0d01ad15"}, + {file = "aiohttp-3.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d265f09a75a79a788237d7f9054f929ced2e69eb0bb79de3798c468d8a90f945"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d89efa095ca7d442a6d0cbc755f9e08190ba40069b235c9886a8763b03785da"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dac314662f4e2aa5009977b652d9b8db7121b46c38f2073bfeed9f4049732cd"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe11310ae1e4cd560035598c3f29d86cef39a83d244c7466f95c27ae04850f10"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ddb2a2026c3f6a68c3998a6c47ab6795e4127315d2e35a09997da21865757f8"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e75b89ac3bd27d2d043b234aa7b734c38ba1b0e43f07787130a0ecac1e12228a"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6e601588f2b502c93c30cd5a45bfc665faaf37bbe835b7cfd461753068232074"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a5d794d1ae64e7753e405ba58e08fcfa73e3fad93ef9b7e31112ef3c9a0efb52"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1f4689c9a1462f3df0a1f7e797791cd6b124ddbee2b570d34e7f38ade0e2c71"}, + {file = 
"aiohttp-3.8.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3032dcb1c35bc330134a5b8a5d4f68c1a87252dfc6e1262c65a7e30e62298275"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8189c56eb0ddbb95bfadb8f60ea1b22fcfa659396ea36f6adcc521213cd7b44d"}, + {file = "aiohttp-3.8.4-cp38-cp38-win32.whl", hash = "sha256:33587f26dcee66efb2fff3c177547bd0449ab7edf1b73a7f5dea1e38609a0c54"}, + {file = "aiohttp-3.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:e595432ac259af2d4630008bf638873d69346372d38255774c0e286951e8b79f"}, + {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5a7bdf9e57126dc345b683c3632e8ba317c31d2a41acd5800c10640387d193ed"}, + {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:22f6eab15b6db242499a16de87939a342f5a950ad0abaf1532038e2ce7d31567"}, + {file = "aiohttp-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7235604476a76ef249bd64cb8274ed24ccf6995c4a8b51a237005ee7a57e8643"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea9eb976ffdd79d0e893869cfe179a8f60f152d42cb64622fca418cd9b18dc2a"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92c0cea74a2a81c4c76b62ea1cac163ecb20fb3ba3a75c909b9fa71b4ad493cf"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493f5bc2f8307286b7799c6d899d388bbaa7dfa6c4caf4f97ef7521b9cb13719"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a63f03189a6fa7c900226e3ef5ba4d3bd047e18f445e69adbd65af433add5a2"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10c8cefcff98fd9168cdd86c4da8b84baaa90bf2da2269c6161984e6737bf23e"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bca5f24726e2919de94f047739d0a4fc01372801a3672708260546aa2601bf57"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:03baa76b730e4e15a45f81dfe29a8d910314143414e528737f8589ec60cf7391"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8c29c77cc57e40f84acef9bfb904373a4e89a4e8b74e71aa8075c021ec9078c2"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:03543dcf98a6619254b409be2d22b51f21ec66272be4ebda7b04e6412e4b2e14"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17b79c2963db82086229012cff93ea55196ed31f6493bb1ccd2c62f1724324e4"}, + {file = "aiohttp-3.8.4-cp39-cp39-win32.whl", hash = "sha256:34ce9f93a4a68d1272d26030655dd1b58ff727b3ed2a33d80ec433561b03d67a"}, + {file = "aiohttp-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:41a86a69bb63bb2fc3dc9ad5ea9f10f1c9c8e282b471931be0268ddd09430b04"}, + {file = "aiohttp-3.8.4.tar.gz", hash = "sha256:bf2e1a9162c1e441bf805a1fd166e249d574ca04e03b34f97e2928769e91ab5c"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = ">=4.0.0a3,<5.0" +attrs = ">=17.3.0" +charset-normalizer = ">=2.0,<4.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "cchardet"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, 
+ {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "async-timeout" +version = "4.0.2" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] [[package]] name = "attrs" version = "22.2.0" description = "Classes Without Boilerplate" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -312,6 +448,90 @@ mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.10.0,<2.11.0" pyflakes = ">=3.0.0,<3.1.0" +[[package]] +name = "frozenlist" +version = "1.3.3" +description = "A list-like structure which implements collections.abc.MutableSequence" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, + {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, + {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, + {file = 
"frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, + {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, + {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, + {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, + {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, + {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, + {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, + {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, + {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, +] + [[package]] name = "humanize" version = "4.6.0" @@ -366,6 +586,31 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "markdown-it-py" +version = "2.2.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"}, + {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "mccabe" version = "0.7.0" @@ -378,6 +623,18 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "mmh3" version = "3.0.0" @@ -414,6 +671,90 @@ files = [ {file = "mmh3-3.0.0.tar.gz", hash = "sha256:d1ec578c09a07d3518ec9be540b87546397fa3455de73c166fcce51eaa5c41c5"}, ] +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = 
"multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file 
= "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + [[package]] name = "nodeenv" version = "1.7.0" @@ -492,24 +833,6 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" -[[package]] -name = "prettytable" -version = "3.6.0" -description = "A simple Python library for easily displaying tabular data in a visually appealing ASCII table format" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "prettytable-3.6.0-py3-none-any.whl", hash = "sha256:3b767129491767a3a5108e6f305cbaa650f8020a7db5dfe994a2df7ef7bad0fe"}, - {file = "prettytable-3.6.0.tar.gz", hash = "sha256:2e0026af955b4ea67b22122f310b90eae890738c08cb0458693a49b6221530ac"}, -] - -[package.dependencies] -wcwidth = "*" - -[package.extras] -tests = ["pytest", "pytest-cov", "pytest-lazy-fixture"] - [[package]] 
name = "pycodestyle" version = "2.10.0" @@ -534,6 +857,21 @@ files = [ {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, ] +[[package]] +name = "pygments" +version = "2.14.0" +description = "Pygments is a syntax highlighting package written in Python." +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pytest" version = "7.2.1" @@ -558,6 +896,26 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +[[package]] +name = "pytest-aiohttp" +version = "1.0.4" +description = "Pytest plugin for aiohttp support" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-aiohttp-1.0.4.tar.gz", hash = "sha256:39ff3a0d15484c01d1436cbedad575c6eafbf0f57cdf76fb94994c97b5b8c5a4"}, + {file = "pytest_aiohttp-1.0.4-py3-none-any.whl", hash = "sha256:1d2dc3a304c2be1fd496c0c2fb6b31ab60cd9fc33984f761f951f8ea1eb4ca95"}, +] + +[package.dependencies] +aiohttp = ">=3.8.1" +pytest = ">=6.1.0" +pytest-asyncio = ">=0.17.2" + +[package.extras] +testing = ["coverage (==6.2)", "mypy (==0.931)"] + [[package]] name = "pytest-asyncio" version = "0.20.3" @@ -729,6 +1087,25 @@ urllib3 = ">=1.21.1,<1.27" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "rich" +version = "13.3.2" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "main" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.3.2-py3-none-any.whl", hash = "sha256:a104f37270bf677148d8acb07d33be1569eeee87e2d1beb286a4e9113caf6f2f"}, + {file = "rich-13.3.2.tar.gz", hash = "sha256:91954fe80cfb7985727a467ca98a7618e5dd15178cc2da10f553b36a93859001"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0,<3.0.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "setuptools" version = "67.3.1" @@ -776,17 +1153,6 @@ docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib- tests = ["coverage[toml]", "freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"] typing = ["mypy", "rich", "twisted"] -[[package]] -name = "telnetlib3" -version = "2.0.0" -description = "Python 3 asyncio Telnet server and client Protocol library" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "telnetlib3-2.0.0-py2.py3-none-any.whl", hash = "sha256:ddfc4e1054a6d86e92e067895b70f35ff98a0562616856ac69c60f1ab1d25c7b"}, -] - [[package]] name = "tomli" version = "2.0.1" @@ -870,17 +1236,94 @@ docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx- test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] [[package]] -name = "wcwidth" -version = "0.2.6" -description = "Measures 
the displayed width of unicode strings in a terminal" +name = "yarl" +version = "1.8.2" +description = "Yet another URL library" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80"}, + {file = "yarl-1.8.2-cp310-cp310-win32.whl", hash = "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42"}, + {file = "yarl-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee"}, + {file = 
"yarl-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2"}, + {file = "yarl-1.8.2-cp311-cp311-win32.whl", hash = "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b"}, + {file = "yarl-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c"}, + {file = "yarl-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37"}, + {file = "yarl-1.8.2-cp37-cp37m-win32.whl", hash = "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89"}, + {file = "yarl-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946"}, + {file = "yarl-1.8.2-cp38-cp38-win32.whl", hash = "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165"}, + {file = "yarl-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588"}, + {file = "yarl-1.8.2-cp39-cp39-win32.whl", hash = "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83"}, + {file = "yarl-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778"}, + {file = "yarl-1.8.2.tar.gz", hash = "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562"}, ] +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + [metadata] lock-version = "2.0" python-versions = "~3.10" -content-hash = "d2f9975e38378375313a0d393ac888880d8e807fa6e2ecba033107fabec236a8" +content-hash = "9f5e22ae2bcb15efc3bcf1b3d213f820d0ca41c9a36f16ae610e49c18ab36ca6" diff --git a/pyproject.toml b/pyproject.toml index 3efc5bf1..4b3fba4f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,18 +40,19 @@ consulate-fc-nix-test = "1.1.0a1" humanize = "^4.4.0" mmh3 = "^3.0" packaging = "^22.0" -prettytable = "^3.6.0" python-lzo = "^1.14" requests = "^2.28.0" shortuuid = "^1.0.11" structlog = "^22.3.0" -telnetlib3 = "^2.0.0" tzlocal = "^4.2" colorama = "^0.4.6" +aiohttp = "^3.8.4" +rich = "^13.3.2" [tool.poetry.dev-dependencies] pre-commit = "^2.21.0" pytest = "^7.2.0" +pytest-aiohttp = "^1.0.4" pytest-asyncio = "^0.20.3" pytest-cache = "^1.0" pytest-cov = "^4.0.0" diff --git a/setup.py b/setup.py index e471d2dc..2fc71fae 100644 --- a/setup.py +++ b/setup.py @@ -55,16 +55,16 @@ def version(): install_requires=[ "consulate", "packaging", - "prettytable", "tzlocal", "PyYaml", "setuptools", "shortuuid", "python-lzo", - "telnetlib3>=1.0", "humanize", "mmh3", "structlog", + "aiohttp", + "rich", ], extras_require={ "test": [ diff --git a/src/backy/api.py b/src/backy/api.py new file mode 100644 index 00000000..6bb0dc67 --- /dev/null +++ b/src/backy/api.py @@ -0,0 +1,233 @@ +import datetime +import re +from json import JSONEncoder +from os import path as p +from typing import Any, Tuple + +from aiohttp import hdrs, web +from aiohttp.web_exceptions import ( + HTTPAccepted, + HTTPBadRequest, + HTTPForbidden, + HTTPNotFound, + HTTPPreconditionFailed, + HTTPPreconditionRequired, + HTTPServiceUnavailable, + HTTPUnauthorized, +) +from aiohttp.web_middlewares import middleware +from aiohttp.web_runner import AppRunner, TCPSite +from structlog.stdlib import BoundLogger + +import backy.daemon +from backy.backup import Backup +from backy.revision import Revision + + +class BackyJSONEncoder(JSONEncoder): + def default(self, o: Any) -> Any: + if hasattr(o, "to_dict"): + return o.to_dict() + elif isinstance(o, datetime.datetime): + return o.isoformat() + else: + super().default(o) + + +class BackyAPI: + daemon: "backy.daemon.BackyDaemon" + sites: dict[Tuple[str, int], TCPSite] + runner: AppRunner + tokens: dict + log: BoundLogger + + def __init__(self, daemon, log): + self.log = log.bind(subsystem="api") + self.daemon = daemon + self.sites = {} + self.app = web.Application( + middlewares=[self.log_conn, self.require_auth, self.to_json] + ) + self.app.add_routes( + [ + web.get("/status", self.get_status), + web.post("/reload", 
self.reload_daemon), + web.get("/jobs", self.get_jobs), + # web.get("/jobs/{job_name}", self.get_job), + web.post("/jobs/{job_name}/run", self.run_job), + web.get("/backups", self.list_backups), + # web.get("/backups/{backup_name}", self.get_backup), + web.post("/backups/{backup_name}/purge", self.run_purge), + web.post("/backups/{backup_name}/touch", self.touch_backup), + web.get("/backups/{backup_name}/revs", self.get_revs), + # web.get("/backups/{backup_name}/revs/{rev_spec}", self.get_rev), + web.put( + "/backups/{backup_name}/revs/{rev_spec}/tags", self.put_tags + ), + ] + ) + + async def start(self): + self.runner = AppRunner(self.app) + await self.runner.setup() + + async def stop(self): + await self.runner.cleanup() + self.sites = {} + + async def reconfigure(self, tokens, addrs, port): + self.log.debug("reconfigure") + self.tokens = tokens + endpoints = [(addr, port) for addr in addrs if addr and port] + for ep in endpoints: + if ep not in self.sites: + self.sites[ep] = site = TCPSite(self.runner, ep[0], ep[1]) + await site.start() + self.log.info("added-site", site=site.name) + for ep, site in list(self.sites.items()): + if ep not in endpoints: + await site.stop() + del self.sites[ep] + self.log.info("deleted-site", site=site.name) + + @middleware + async def log_conn(self, request: web.Request, handler): + request["log"] = self.log.bind( + path=request.path, query=request.query_string + ) + try: + resp = await handler(request) + except Exception as e: + if not isinstance(e, web.HTTPException): + request["log"].exception("error-handling-request") + else: + request["log"].debug( + "request-result", status_code=e.status_code + ) + raise + request["log"].debug("request-result", response=resp.body) + return resp + + @middleware + async def require_auth(self, request: web.Request, handler): + request["log"].debug("new-conn") + token = request.headers.get(hdrs.AUTHORIZATION, "") + if not token.startswith("Bearer "): + request["log"].info("auth-invalid-token") + raise HTTPUnauthorized() + token = token.removeprefix("Bearer ") + if len(token) < 3: # avoid potential truthiness edge cases + request["log"].info("auth-token-too-short") + raise HTTPUnauthorized() + client = self.tokens.get(token, None) + if not client: + request["log"].info("auth-token-unknown") + raise HTTPUnauthorized() + request["client"] = client + request["log"] = request["log"].bind(client=client) + request["log"].debug("auth-passed") + return await handler(request) + + @middleware + async def to_json(self, request: web.Request, handler): + resp = await handler(request) + if isinstance(resp, web.Response): + return resp + elif resp is None: + raise web.HTTPNoContent() + else: + return web.json_response(resp, dumps=BackyJSONEncoder().encode) + + async def get_status(self, request: web.Request): + filter = request.query.get("filter", "") + if filter: + filter = re.compile(filter) + return self.daemon.status(filter) + + async def reload_daemon(self, request: web.Request): + self.daemon.reload() + + async def get_jobs(self, request: web.Request): + return list(self.daemon.jobs.values()) + + async def get_job(self, request: web.Request): + try: + name = request.match_info.get("job_name", None) + return self.daemon.jobs[name] + except KeyError: + raise HTTPNotFound() + + async def run_job(self, request: web.Request): + j = await self.get_job(request) + j.run_immediately.set() + raise HTTPAccepted() + + async def list_backups(self, request: web.Request): + backups = self.daemon.find_all_backups() + return [b for b in backups 
if b not in self.daemon.jobs] + + async def get_backup(self, request: web.Request) -> Backup: + name = request.match_info.get("backup_name", None) + if not name: + raise HTTPNotFound() + if name in self.daemon.jobs: + raise HTTPForbidden() + try: + return Backup(p.join(self.daemon.base_dir, name), request["log"]) + except FileNotFoundError: + raise HTTPNotFound() + + async def run_purge(self, request: web.Request): + backup = await self.get_backup(request) + backup.set_purge_pending() + raise HTTPAccepted() + + async def touch_backup(self, request: web.Request): + backup = await self.get_backup(request) + backup.touch() + + async def get_revs(self, request: web.Request): + backup = await self.get_backup(request) + if request.query.get("only_clean", "") == "1": + revs = backup.clean_history + else: + revs = backup.history + return [r for r in revs if not r.location] + + async def get_rev(self, request: web.Request) -> Revision: + spec = request.match_info.get("rev_spec", None) + backup = await self.get_backup(request) + try: + rev = backup.find(spec) + if rev.location: + raise HTTPNotFound() + return rev + except KeyError: + raise HTTPNotFound() + + async def put_tags(self, request: web.Request): + json = await request.json() + if "old_tags" not in json: + raise HTTPPreconditionRequired() + old_tags = set(json["old_tags"]) + if "new_tags" not in json: + raise HTTPBadRequest() + new_tags = set(json["new_tags"]) + + autoremove = request.query.get("autoremove", "") == "1" + spec = request.match_info.get("rev_spec", None) + backup = await self.get_backup(request) + try: + if not backup.tags( + "set", + spec, + new_tags, + old_tags, + autoremove=autoremove, + force=True, + ): + raise HTTPPreconditionFailed() + except KeyError: + raise HTTPNotFound() + except BlockingIOError: + raise HTTPServiceUnavailable() diff --git a/src/backy/backends/chunked/__init__.py b/src/backy/backends/chunked/__init__.py index 11f95eba..6f5c378d 100644 --- a/src/backy/backends/chunked/__init__.py +++ b/src/backy/backends/chunked/__init__.py @@ -29,7 +29,7 @@ def __init__(self, revision: Revision, log: BoundLogger): self.revision.backup.path + "/chunks", log ) self.store = self.STORES[path] - self.log = log + self.log = log.bind(subsystem="chunked-backend") def open(self, mode="rb"): if "w" in mode or "+" in mode and self.clone_parent: diff --git a/src/backy/backends/cowfile.py b/src/backy/backends/cowfile.py index 96ea37a4..0a81a6b0 100644 --- a/src/backy/backends/cowfile.py +++ b/src/backy/backends/cowfile.py @@ -1,7 +1,5 @@ import os.path -from structlog.stdlib import BoundLogger - import backy.revision from backy.backends import BackyBackend from backy.utils import CHUNK_SIZE, cp_reflink diff --git a/src/backy/backup.py b/src/backy/backup.py index 5f678996..e31c382c 100644 --- a/src/backy/backup.py +++ b/src/backy/backup.py @@ -3,9 +3,15 @@ import os import os.path as p import time -from typing import IO, Type +from typing import IO, Literal, Optional, Type import yaml +from aiohttp import ClientError, ClientResponseError +from aiohttp.web_exceptions import ( + HTTPForbidden, + HTTPNotFound, + HTTPPreconditionFailed, +) from structlog.stdlib import BoundLogger from backy.utils import min_date @@ -13,6 +19,7 @@ from .backends import BackyBackend from .backends.chunked import ChunkedFileBackend from .backends.cowfile import COWFileBackend +from .client import APIClientManager from .nbd.server import Server from .revision import Revision, Trust, filter_schedule_tags from .schedule import Schedule @@ -54,10 +61,10 @@ 
def locked_function(self, *args, **kw): except BlockingIOError: self.log.warning( "lock-no-exclusive", - _fmt_msg="Failed to get exclusive lock for '{function}'. Continuing.", + _fmt_msg="Failed to get exclusive lock for '{function}'.", function=f.__name__, ) - return + raise else: try: return f(self, *args, **kw) @@ -82,6 +89,7 @@ class Backup(object): """ path: str + name: str config: dict schedule: Schedule source: BackySourceFactory @@ -98,6 +106,7 @@ def __init__(self, path, log): self._lock_fds = {} self.path = p.realpath(path) + self.name = p.basename(self.path) self.scan() # Load config from file @@ -147,6 +156,9 @@ def __init__(self, path, log): "Unsupported backend_type '{}'".format(self.backend_type) ) + def to_dict(self): + return self.config + def scan(self): self.history = [] self._by_uuid = {} @@ -161,11 +173,35 @@ def scan(self): # The history is stored: oldest first. newest last. self.history.sort(key=lambda r: r.timestamp) + def touch(self): + os.utime(self.path, None) + + def set_purge_pending(self): + open(p.join(self.path, ".purge_pending"), "w").close() + + def remove_purge_pending(self): + path = p.join(self.path, ".purge_pending") + if p.exists(path): + os.remove(path) + @property def clean_history(self): """History without incomplete revisions.""" return [rev for rev in self.history if "duration" in rev.stats] + def validate_tags(self, tags): + missing_tags = ( + filter_schedule_tags(tags) - self.schedule.schedule.keys() + ) + if missing_tags: + self.log.error( + "unknown-tags", + _fmt_msg="The following tags are missing from the schedule: {unknown_tags}\n" + "Check the config file, add the `manual:` prefix or disable tag validation (-f)", + unknown_tags=", ".join(missing_tags), + ) + raise RuntimeError("Unknown tags") + ################# # Making backups @@ -184,21 +220,47 @@ def forget_revision(self, revision): r = self.find(revision) r.remove() + @locked(target=".backup", mode="exclusive") + def expire(self): + self.schedule.expire(self) + + @locked(target=".backup", mode="exclusive") + def tags( + self, + action: Literal["set", "add", "remove"], + revision: str, + tags: set[str], + expect: Optional[set[str]] = None, + autoremove: bool = False, + force=False, + ) -> bool: + self.scan() + r = self.find(revision) + if not force: + self.validate_tags(tags) + if expect is not None and expect != r.tags: + self.log.info("tags-expectation-failed") + return False + match action: + case "set": + r.tags = tags + case "add": + r.tags |= tags + case "remove": + r.tags -= tags + case _: + raise ValueError(f"invalid action '{action}'") + if not r.tags and autoremove: + r.remove() + else: + r.materialize() + return True + @locked(target=".backup", mode="exclusive") @locked(target=".purge", mode="shared") def backup(self, tags, force=False): if not force: - missing_tags = ( - filter_schedule_tags(tags) - self.schedule.schedule.keys() - ) - if missing_tags: - self.log.error( - "unknown-tags", - _fmt_msg="The following tags are missing from the schedule: {unknown_tags}\n" - "Check the config file, add the `manual:` prefix or disable tag validation (-f)", - unknown_tags=", ".join(missing_tags), - ) - raise RuntimeError("Unknown tags") + self.validate_tags(tags) start = time.time() @@ -280,6 +342,7 @@ def verify(self, revision=None): def purge(self): backend = self.backend_factory(self.history[0], self.log) backend.purge() + self.remove_purge_pending() ################# # Restoring @@ -469,7 +532,7 @@ def find_by_uuid(self, spec): except KeyError: raise IndexError() - def 
find(self, spec): + def find(self, spec) -> Revision: """Flexible revision search. Locates a revision by relative number, by tag, or by uuid. @@ -485,3 +548,94 @@ def find(self, spec): pass self.log.warning("find-rev-not-found", spec=spec) raise KeyError(spec) + + ################### + # Syncing Revisions + + @locked(target=".backup", mode="exclusive") + async def push_metadata(self, api_servers): + self.log.info("push-metadata-start") + async with APIClientManager(api_servers, self.log) as apis: + servers_with_removals = set() + for r in self.history: + if not r.pending_changes: + continue + + self.log.debug( + "push-metadata-updating-tags", + server=r.location, + rev_uuid=r.uuid, + old_tags=r.orig_tags, + new_tags=r.tags, + ) + try: + await apis[r.location].put_tags(r, autoremove=True) + if not r.tags: + r.remove(force=True) + servers_with_removals.add(r.location) + except (ClientError, KeyError) as e: + if ( + isinstance(e, ClientResponseError) + and e.status == HTTPPreconditionFailed.status_code + ): + self.log.warning( + "push-metadata-unexpected-server-state", + expected_tags=r.orig_tags, + ) + else: + self.log.exception("push-metadata-error") + r.remove(force=True) + + for s in servers_with_removals: + self.log.debug("push-metadata-purging-remote", server=s) + try: + await apis[s].run_purge(self.name) + except ClientError: + self.log.warning( + "push-metadata-remote-purge-error", + exc_info=True, + ) + continue + + @locked(target=".backup", mode="exclusive") + async def pull_metadata(self, api_servers): + self.log.info("pull-metadata-start") + async with APIClientManager(api_servers, self.log) as apis: + for server in apis: + try: + await apis[server].touch_backup(self.name) + remote_revs = await apis[server].get_revs(self) + except ClientError as e: + if not isinstance( + e, ClientResponseError + ) or e.status not in ( + HTTPNotFound.status_code, + HTTPForbidden.status_code, + ): + self.log.warning("pull-metadata-error", exc_info=True) + else: + self.log.debug("pull-metadata-not-found", server=server) + continue + self.log.debug( + "pull-metadata-found-matching-server", + server=server, + revs=len(remote_revs), + ) + + matching_uuids = { + r.uuid for r in self.history if r.location == server + } + remote_uuids = {r.uuid for r in remote_revs} + for uuid in matching_uuids - remote_uuids: + self.log.warning( + "pull-metadata-removing-unknown-rev", rev_uuid=uuid + ) + self.find_by_uuid(uuid).remove(force=True) + + for r in remote_revs: + r.materialize() + self.log.debug( + "pull-metadata-updated-rev", + server=server, + rev_uid=r.uuid, + ) diff --git a/src/backy/client.py b/src/backy/client.py new file mode 100644 index 00000000..01f8f8cf --- /dev/null +++ b/src/backy/client.py @@ -0,0 +1,289 @@ +import datetime +from asyncio import get_running_loop + +import aiohttp +import humanize +from aiohttp import ClientTimeout, TCPConnector, hdrs +from aiohttp.web_exceptions import HTTPNotFound +from rich import print as rprint +from rich.table import Column, Table +from structlog.stdlib import BoundLogger +from yarl import URL + +import backy.backup +from backy.revision import Revision +from backy.utils import format_datetime_local + + +class APIClientManager: + connector: TCPConnector + servers: dict + clients: dict[str, "APIClient"] + log: BoundLogger + + def __init__(self, servers, log): + self.connector = TCPConnector() + self.servers = servers + self.clients = dict() + self.log = log.bind(subsystem="APIClientManager") + + def __getitem__(self, name): + if name and name not in 
self.clients:
+            self.clients[name] = APIClient.from_conf(
+                name, self.servers[name], self.log, self.connector
+            )
+        return self.clients[name]
+
+    def __iter__(self):
+        return filter(None, self.servers)
+
+    async def close(self):
+        for c in self.clients.values():
+            await c.close()
+        await self.connector.close()
+
+    async def __aenter__(self) -> "APIClientManager":
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        await self.close()
+
+
+class APIClient:
+    log: BoundLogger
+    session: aiohttp.ClientSession
+
+    def __init__(
+        self,
+        server_name: str,
+        host: str,
+        port: int,
+        token: str,
+        log,
+        connector=None,
+    ):
+        assert get_running_loop().is_running()
+        self.log = log.bind(subsystem="APIClient")
+        self.server_name = server_name
+        # The daemon serves its API over plain HTTP (the TCPSite is started
+        # without a TLS context), so the session has to use the http scheme.
+        self.session = aiohttp.ClientSession(
+            URL.build(scheme="http", host=host, port=port),
+            headers={hdrs.AUTHORIZATION: "Bearer " + token},
+            raise_for_status=True,
+            timeout=ClientTimeout(30, connect=10),
+            connector=connector,
+            connector_owner=connector is None,
+        )
+
+    @classmethod
+    def from_conf(cls, server_name, conf, *args, **kwargs):
+        return cls(
+            server_name,
+            conf["host"],
+            conf["port"],
+            conf["token"],
+            *args,
+            **kwargs,
+        )
+
+    async def fetch_status(self, filter=""):
+        async with self.session.get(
+            "/status", params={"filter": filter}
+        ) as response:
+            jobs = await response.json()
+            for job in jobs:
+                if job["last_time"]:
+                    job["last_time"] = datetime.datetime.fromisoformat(
+                        job["last_time"]
+                    )
+                if job["next_time"]:
+                    job["next_time"] = datetime.datetime.fromisoformat(
+                        job["next_time"]
+                    )
+            return jobs
+
+    async def reload_daemon(self):
+        async with self.session.post("/reload") as response:
+            return
+
+    async def get_jobs(self):
+        async with self.session.get("/jobs") as response:
+            return await response.json()
+
+    async def run_job(self, name):
+        async with self.session.post(f"/jobs/{name}/run") as response:
+            return
+
+    async def list_backups(self):
+        async with self.session.get("/backups") as response:
+            return await response.json()
+
+    async def run_purge(self, name):
+        async with self.session.post(f"/backups/{name}/purge") as response:
+            return
+
+    async def touch_backup(self, name):
+        async with self.session.post(f"/backups/{name}/touch") as response:
+            return
+
+    async def get_revs(
+        self, backup: "backy.backup.Backup", only_clean: bool = True
+    ):
+        async with self.session.get(
+            f"/backups/{backup.name}/revs",
+            params={"only_clean": int(only_clean)},
+        ) as response:
+            json = await response.json()
+        revs = [Revision.from_dict(r, backup, self.log) for r in json]
+        for r in revs:
+            r.backend_type = ""
+            r.parent = None
+            # Copy the tag set so later tag edits cannot leak into orig_tags.
+            r.orig_tags = set(r.tags)
+            r.location = self.server_name
+        return revs
+
+    async def put_tags(self, rev: Revision, autoremove: bool = False):
+        async with self.session.put(
+            f"/backups/{rev.backup.name}/revs/{rev.uuid}/tags",
+            json={"old_tags": list(rev.orig_tags), "new_tags": list(rev.tags)},
+            params={"autoremove": int(autoremove)},
+        ) as response:
+            return
+
+    async def close(self):
+        await self.session.close()
+
+    async def __aenter__(self) -> "APIClient":
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        await self.close()
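+
+
+# A minimal usage sketch (illustrative only: the peer name, address and
+# token below are made up; real values come from the ``api.servers``
+# section of the daemon configuration):
+#
+#   async def example(log):
+#       async with APIClient("peer1", "192.0.2.10", 6023, "token", log) as api:
+#           return await api.get_jobs()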
+
+
+class CLIClient:
+    api: APIClient
+    log: BoundLogger
+
+    def __init__(self, apiclient, log):
+        self.api = apiclient
+        self.log = log.bind(subsystem="CLIClient")
+
+    async def __aenter__(self) -> "CLIClient":
+        await self.api.__aenter__()
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        await self.api.__aexit__(exc_type, exc_val, exc_tb)
+
+    async def jobs(self, filter_re=""):
+        """List status of all known jobs. Optionally filter by regex."""
+
+        tz = format_datetime_local(None)[1]
+
+        t = Table(
+            "Job",
+            "SLA",
+            "SLA overdue",
+            "Status",
+            f"Last Backup ({tz})",
+            "Last Tags",
+            Column("Last Duration", justify="right"),
+            f"Next Backup ({tz})",
+            "Next Tags",
+        )
+
+        jobs = await self.api.fetch_status(filter_re)
+        jobs.sort(key=lambda j: j["job"])
+        for job in jobs:
+            overdue = (
+                humanize.naturaldelta(job["sla_overdue"])
+                if job["sla_overdue"]
+                else "-"
+            )
+            last_duration = (
+                humanize.naturaldelta(job["last_duration"])
+                if job["last_duration"]
+                else "-"
+            )
+            last_time = format_datetime_local(job["last_time"])[0]
+            next_time = format_datetime_local(job["next_time"])[0]
+
+            t.add_row(
+                job["job"],
+                job["sla"],
+                overdue,
+                job["status"],
+                last_time,
+                job["last_tags"],
+                last_duration,
+                next_time,
+                job["next_tags"],
+            )
+
+        rprint(t)
+        print("{} jobs shown".format(len(jobs)))
+
+    async def status(self):
+        """Show job status overview"""
+        t = Table("Status", "#")
+        state_summary = {}
+        jobs = await self.api.get_jobs()
+        for job in jobs:
+            state_summary.setdefault(job["status"], 0)
+            state_summary[job["status"]] += 1
+
+        for state in sorted(state_summary):
+            t.add_row(state, str(state_summary[state]))
+        rprint(t)
+
+    async def run(self, job: str):
+        """Trigger immediate run for one job"""
+        try:
+            await self.api.run_job(job)
+        except aiohttp.ClientResponseError as e:
+            # With raise_for_status=True, HTTP errors surface as
+            # ClientResponseError; the server signals an unknown job as 404.
+            if e.status == HTTPNotFound.status_code:
+                print("Unknown job {}".format(job))
+                return
+            raise
+        print("Triggered immediate run for {}".format(job))
+
+    async def runall(self):
+        """Trigger immediate run for all jobs"""
+        jobs = await self.api.get_jobs()
+        for job in jobs:
+            await self.run(job["name"])
+
+    async def reload(self):
+        """Reload the configuration."""
+        print("Triggering daemon reload.")
+        await self.api.reload_daemon()
+        print("Daemon configuration reloaded.")
+
+    async def check(self):
+        status = await self.api.fetch_status()
+
+        failed_jobs = []
+
+        for job in status:
+            if job["manual_tags"]:
+                self.log.info(
+                    "check-manual-tags",
+                    manual_tags=job["manual_tags"],
+                    job_name=job["job"],
+                )
+            if job["unsynced_revs"]:
+                self.log.info(
+                    "check-unsynced-revs", unsynced_revs=job["unsynced_revs"]
+                )
+            if job["sla"] != "OK":
+                failed_jobs.append(job)
+
+        if failed_jobs:
+            for f in failed_jobs:
+                self.log.critical(
+                    "check-sla-violation",
+                    job_name=f["job"],
+                    last_time=str(f["last_time"]),
+                    sla_overdue=f["sla_overdue"],
+                )
+            self.log.debug("check-jobs-failed", failed_jobs=len(failed_jobs))
+            raise RuntimeError("SLA Violation")
+
+        self.log.info("check-within-sla", num=len(status))
diff --git a/src/backy/conftest.py b/src/backy/conftest.py
index 899851a4..200ee9df 100644
--- a/src/backy/conftest.py
+++ b/src/backy/conftest.py
@@ -41,11 +41,7 @@ def pytest_assertrepr_compare(op, left, right):
 
 @pytest.fixture(autouse=True)
 def log(monkeypatch):
-    def noop_init_logging(
-        verbose,
-        logfile=None,
-        default_job_name="",
-    ):
+    def noop_init_logging(*args, **kwargs):
         pass
 
     monkeypatch.setattr(backy.logging, "init_logging", noop_init_logging)
diff --git a/src/backy/daemon.py b/src/backy/daemon.py
index bbaf9046..ae2917df 100644
--- a/src/backy/daemon.py
+++ b/src/backy/daemon.py
@@ -2,24 +2,20 @@
 import fcntl
 import os
 import os.path as p
-import re
 import shutil
 import signal
 import sys
 import time
 from typing import IO, Optional
 
-import humanize
-import pkg_resources
-import prettytable
-import telnetlib3
 import yaml
 from structlog.stdlib import BoundLogger
 
+from .api import 
BackyAPI from .revision import filter_manual_tags from .schedule import Schedule from .scheduler import Job -from .utils import SafeFile, format_datetime_local, has_recent_changes +from .utils import has_recent_changes daemon: "BackyDaemon" @@ -27,30 +23,34 @@ class BackyDaemon(object): # config defaults, will be overriden from config file worker_limit: int = 1 - base_dir: Optional[str] = None - status_file: Optional[str] = None - status_interval: int = 30 - telnet_addrs: str = "::1, 127.0.0.1" - telnet_port: int = 6023 + base_dir: str + api_addrs: str = "::1, 127.0.0.1" + api_port: int = 6023 + api_tokens: dict[str, str] + api_servers: dict config_file: str - config: Optional[dict] + config: dict schedules: dict[str, Schedule] jobs: dict[str, Job] backup_semaphores: dict[str, asyncio.BoundedSemaphore] log: BoundLogger _lock: Optional[IO] = None + reload_api: asyncio.Event loop: Optional[asyncio.AbstractEventLoop] = None def __init__(self, config_file, log): self.config_file = config_file self.log = log.bind(subsystem="daemon") - self.config = None + self.config = {} self.schedules = {} self.backup_semaphores = {} self.jobs = {} self._lock = None + self.reload_api = asyncio.Event() + self.api_tokens = {} + self.api_servers = {} def _read_config(self): if not p.exists(self.config_file): @@ -61,16 +61,14 @@ def _read_config(self): self.config = yaml.safe_load(f) g = self.config.get("global", {}) - self.worker_limit = int(g.get("worker-limit", self.worker_limit)) - self.base_dir = g.get("base-dir", self.base_dir) - if not self.status_file: - self.status_file = p.join(self.base_dir, "status") - self.status_file = g.get("status-file", self.status_file) - self.status_interval = int( - g.get("status-interval", self.status_interval) - ) - self.telnet_addrs = g.get("telnet-addrs", self.telnet_addrs) - self.telnet_port = int(g.get("telnet-port", self.telnet_port)) + self.worker_limit = int(g.get("worker-limit", type(self).worker_limit)) + self.base_dir = g.get("base-dir") + api = self.config.get("api", {}) + self.api_addrs = api.get("addrs", type(self).api_addrs) + self.api_port = int(api.get("port", type(self).api_port)) + + self.api_servers = api.get("servers", {}) + self.api_tokens = api.get("tokens", {}) new = {} for name, config in self.config["schedules"].items(): @@ -83,11 +81,11 @@ def _read_config(self): self.log.info( "read-config", - status_interval=self.status_interval, - status_file=self.status_file, worker_limit=self.worker_limit, base_dir=self.base_dir, schedules=", ".join(self.schedules), + api_addrs=self.api_addrs, + api_port=self.api_port, ) def _apply_config(self): @@ -150,8 +148,10 @@ def start(self, loop): self._apply_config() - loop.create_task(self.save_status_file(), name="save-status") loop.create_task(self.purge_old_files(), name="purge-old-files") + loop.create_task( + self.purge_pending_backups(), name="purge-pending-backups" + ) loop.create_task(self.shutdown_loop(), name="shutdown-cleanup") def handle_signals(signum, _traceback): @@ -179,23 +179,36 @@ def reload(self): self.log.info("reloading") try: self._read_config() - except RuntimeError: - self.log.exception("error-reading-config") - else: self._apply_config() + self.reload_api.set() self.log.info("reloading-finished") + except Exception: + self.log.critical("error-reloading", exc_info=True) + self.terminate() + raise - def telnet_server(self): - """Starts to listen on all configured telnet addresses.""" - assert self.loop, "cannot start telnet server without event loop" - for addr in (a.strip() for a in 
self.telnet_addrs.split(",")): - self.log.info("telnet-starting", addr=addr, port=self.telnet_port) - server = telnetlib3.create_server( - host=addr, port=self.telnet_port, shell=telnet_server_shell - ) - self.loop.create_task( - server, name=f"telnet-server-{addr}-{self.telnet_port}" + def api_server(self): + assert self.loop, "cannot start api server without event loop" + self.loop.create_task(self.api_server_loop(), name="api_server_loop") + + async def api_server_loop(self): + try: + self.log.info( + "api-starting", addrs=self.api_addrs, port=self.api_port ) + api = BackyAPI(self, self.log) + await api.start() + while True: + self.log.info("api-reconfigure") + await api.reconfigure( + self.api_tokens, + [a.strip() for a in self.api_addrs.split(",")], + self.api_port, + ) + self.reload_api.clear() + await self.reload_api.wait() + except Exception: + self.log.exception("api_server_loop") def terminate(self): self.log.info("terminating") @@ -231,10 +244,13 @@ def status(self, filter_re=None): continue job.backup.scan() manual_tags = set() + unsynced_revs = 0 if job.backup.clean_history: last = job.backup.clean_history[-1] for rev in job.backup.clean_history: manual_tags |= filter_manual_tags(rev.tags) + if rev.pending_changes: + unsynced_revs += 1 else: last = None result.append( @@ -259,268 +275,61 @@ def status(self, filter_re=None): else None ), manual_tags=", ".join(manual_tags), + unsynced_revs=unsynced_revs, ) ) return result - def _write_status_file(self): - status = self.status() - with SafeFile(self.status_file, sync=False) as tmp: - tmp.protected_mode = 0o644 - tmp.open_new("w") - yaml.safe_dump(status, tmp.f) - for job in status: - if not job["sla_overdue"]: - continue - overdue = humanize.naturaldelta(job["sla_overdue"]) - self.log.warning( - "sla-violation", job_name=job["job"], overdue=overdue - ) - - async def save_status_file(self): - while True: - try: - self._write_status_file() - except Exception: # pragma: no cover - self.log.exception("save-status-exception") - await asyncio.sleep(self.status_interval) - async def purge_old_files(self): # `stat` and other file system access things are _not_ # properly async, we might want to spawn those off into a separate # thread. while True: - self.log.info("purge-scanning") - for candidate in os.scandir(self.base_dir): - if not candidate.is_dir(follow_symlinks=False): - continue - self.log.debug("purge-candidate", candidate=candidate.path) - reference_time = time.time() - 3 * 31 * 24 * 60 * 60 - if not has_recent_changes(candidate, reference_time): - self.log.info("purging", candidate=candidate.path) - shutil.rmtree(candidate) - self.log.info("purge-finished") + try: + self.log.info("purge-scanning") + for candidate in os.scandir(self.base_dir): + if not candidate.is_dir(follow_symlinks=False): + continue + self.log.debug("purge-candidate", candidate=candidate.path) + reference_time = time.time() - 3 * 31 * 24 * 60 * 60 + if not has_recent_changes(candidate, reference_time): + self.log.info("purging", candidate=candidate.path) + shutil.rmtree(candidate) + self.log.info("purge-finished") + except Exception: + self.log.exception("purge") await asyncio.sleep(24 * 60 * 60) - def check(self): - try: - self._read_config() - except RuntimeError: - sys.exit(1) - - if not p.exists(self.status_file): - self.log.error("check-no-status-file", status_file=self.status_file) - sys.exit(3) - - # The output should be relatively new. Let's say 5 min max. 
- s = os.stat(self.status_file) - if time.time() - s.st_mtime > 5 * 60: - self.log.critical( - "check-old-status-file", age=time.time() - s.st_mtime - ) - sys.exit(2) - - failed_jobs = [] - - with open(self.status_file, encoding="utf-8") as f: - status = yaml.safe_load(f) - - for job in status: - if job["manual_tags"]: - self.log.info( - "check-manual-tags", - manual_tags=job["manual_tags"], - job_name=job["job"], - ) - if job["sla"] != "OK": - failed_jobs.append(job) - - if failed_jobs: - for f in failed_jobs: - self.log.critical( - "check-sla-violation", - job_name=f["job"], - last_time=str(f["last_time"]), - sla_overdue=f["sla_overdue"], - ) - self.log.debug("check-jobs-failed", failed_jobs=len(failed_jobs)) - sys.exit(2) - - self.log.info("check-within-sla", num=len(status)) - sys.exit(0) - - -async def telnet_server_shell(reader, writer): - """ - A default telnet shell, appropriate for use with telnetlib3.create_server. - This shell provides a very simple REPL, allowing introspection and state - toggling of the connected client session. - This function is a :func:`~asyncio.coroutine`. - """ - from telnetlib3.server_shell import CR, LF, readline - - writer.write( - "backy {}".format(pkg_resources.require("backy")[0].version) + CR + LF - ) - writer.write("Ready." + CR + LF) - - linereader = readline(reader, writer) - linereader.send(None) - - shell = SchedulerShell(writer=writer) - - command = None - while True: - if command: - writer.write(CR + LF) - writer.write("backy> ") - command = None - while command is None: - # TODO: use reader.readline() + async def purge_pending_backups(self): + # `stat` and other file system access things are _not_ + # properly async, we might want to spawn those off into a separate + # thread. + while True: try: - inp = await reader.read(1) + self.log.info("purge-pending-scanning") + for candidate in os.scandir(self.base_dir): + if ( + not candidate.is_dir(follow_symlinks=False) + or candidate.name in self.jobs # will get purged anyway + or not p.exists( + p.join(candidate.path, ".purge_pending") + ) + ): + continue + self.log.info("purging-pending", job=candidate.name) + await Job(self, candidate.name, self.log).run_purge() + self.log.info("purge-pending-finished") except Exception: - inp = None # Likely a timeout - if not inp: - return - command = linereader.send(inp) - command = command.strip() - writer.write(CR + LF) - if command == "quit": - writer.write("Goodbye." + CR + LF) - break - elif command == "help": - writer.write("jobs [filter], status, run , runall, reload") - elif command.startswith("jobs"): - if " " in command: - _, filter_re = command.split(" ", maxsplit=1) - else: - filter_re = None - shell.jobs(filter_re) - elif command == "reload": - shell.reload() - elif command == "status": - shell.status() - elif command.startswith("runall"): - shell.runall() - elif command.startswith("run"): - if " " in command: - _, job = command.split(" ", maxsplit=1) - else: - job = None - shell.run(job) - elif command: - writer.write("no such command.") - writer.close() - - -class SchedulerShell(object): - writer = None - - def __init__(self, writer): - self.writer = writer - - def jobs(self, filter_re=None): - """List status of all known jobs. 
Optionally filter by regex.""" - filter_re = re.compile(filter_re) if filter_re else None - - tz = format_datetime_local(None)[1] - - t = prettytable.PrettyTable( - [ - "Job", - "SLA", - "SLA overdue", - "Status", - f"Last Backup ({tz.zone})", - "Last Tags", - "Last Duration", - f"Next Backup ({tz.zone})", - "Next Tags", - ] - ) - t.align = "l" - t.align["Last Dur"] = "r" - t.sortby = "Job" - - jobs = daemon.status(filter_re) - for job in jobs: - overdue = ( - humanize.naturaldelta(job["sla_overdue"]) - if job["sla_overdue"] - else "-" - ) - last_duration = ( - humanize.naturaldelta(job["last_duration"]) - if job["last_duration"] - else "-" - ) - last_time = format_datetime_local(job["last_time"])[0] - next_time = format_datetime_local(job["next_time"])[0] - - t.add_row( - [ - job["job"], - job["sla"], - overdue, - job["status"], - last_time, - job["last_tags"], - last_duration, - next_time, - job["next_tags"], - ] - ) - - self.writer.write(t.get_string().replace("\n", "\r\n") + "\r\n") - self.writer.write("{} jobs shown".format(len(jobs))) - - def status(self): - """Show job status overview""" - t = prettytable.PrettyTable(["Status", "#"]) - state_summary = {} - for job in daemon.jobs.values(): - state_summary.setdefault(job.status, 0) - state_summary[job.status] += 1 - - for state in sorted(state_summary): - t.add_row([state, state_summary[state]]) - self.writer.write(t.get_string().replace("\n", "\r\n")) - - def run(self, job): - """Show job status overview""" - try: - job = daemon.jobs[job] - except KeyError: - self.writer.write("Unknown job {}".format(job)) - return - if not hasattr(job, "_task"): - self.writer.write("Task not ready. Try again later.") - return - job.run_immediately.set() - self.writer.write("Triggered immediate run for {}".format(job.name)) - - def runall(self): - """Show job status overview""" - for job in daemon.jobs.values(): - if not hasattr(job, "_task"): - self.writer.write( - "{} not ready. 
Try again later.".format(job.name) - ) - continue - job.run_immediately.set() - self.writer.write("Triggered immediate run for {}".format(job.name)) - - def reload(self): - """Reload the configuration.""" - self.writer.write("Triggering daemon reload.\r\n") - daemon.reload() - self.writer.write("Daemon configuration reloaded.\r\n") - + self.log.exception("purge-pending") + await asyncio.sleep(24 * 60 * 60) -def check(config_file, log: BoundLogger): # pragma: no cover - daemon = BackyDaemon(config_file, log) - daemon.check() + def find_all_backups(self): + self.log.info("scanning-backups") + return [ + b.name + for b in os.scandir(self.base_dir) + if b.is_dir(follow_symlinks=False) + ] def main(config_file, log: BoundLogger): # pragma: no cover @@ -529,5 +338,5 @@ def main(config_file, log: BoundLogger): # pragma: no cover loop = asyncio.get_event_loop() daemon = BackyDaemon(config_file, log) daemon.start(loop) - daemon.telnet_server() + daemon.api_server() daemon.run_forever() diff --git a/src/backy/fallocate.py b/src/backy/fallocate.py index b0b5b893..691c1ab5 100644 --- a/src/backy/fallocate.py +++ b/src/backy/fallocate.py @@ -14,6 +14,7 @@ def _fake_fallocate(fd, mode, offset, len_): + log.debug("fallocate-non-hole-punching") if len_ <= 0: raise IOError("fallocate: length must be positive") if mode & FALLOC_FL_PUNCH_HOLE: @@ -49,7 +50,6 @@ def fallocate(fd, mode, offset, len_): try: fallocate = _make_fallocate() except AttributeError: # pragma: no cover - log.debug("fallocate-non-hole-punching") fallocate = _fake_fallocate diff --git a/src/backy/logging.py b/src/backy/logging.py index 1a60f89f..6410a7f8 100644 --- a/src/backy/logging.py +++ b/src/backy/logging.py @@ -16,11 +16,6 @@ except ImportError: colorama = None -try: - import systemd.journal as journal -except ImportError: - journal = None - _MISSING = "{who} requires the {package} package installed." _EVENT_WIDTH = 30 # pad the event name to so many characters diff --git a/src/backy/main.py b/src/backy/main.py index 681ec319..dc5702bf 100644 --- a/src/backy/main.py +++ b/src/backy/main.py @@ -1,6 +1,7 @@ # -*- encoding: utf-8 -*- import argparse +import asyncio import datetime import errno import sys @@ -9,7 +10,8 @@ import humanize import structlog import tzlocal -from prettytable import PrettyTable +from rich import print as rprint +from rich.table import Column, Table from structlog.stdlib import BoundLogger import backy.backup @@ -17,6 +19,7 @@ from backy.utils import format_datetime_local from . 
import logging +from .client import APIClient, CLIClient def valid_date(s): @@ -44,12 +47,15 @@ def status(self): total_bytes = 0 tz = tzlocal.get_localzone() - t = PrettyTable( - [f"Date ({tz})", "ID", "Size", "Duration", "Tags", "Trust"] + t = Table( + f"Date ({tz})", + "ID", + Column("Size", justify="right"), + Column("Duration", justify="right"), + "Tags", + "Trust", + "Location", ) - t.align = "l" - t.align["Size"] = "r" - t.align["Durat"] = "r" for r in b.history: total_bytes += r.stats.get("bytes_written", 0) @@ -60,19 +66,18 @@ def status(self): duration = "-" t.add_row( - [ - format_datetime_local(r.timestamp)[0], - r.uuid, - humanize.naturalsize( - r.stats.get("bytes_written", 0), binary=True - ), - duration, - ",".join(r.tags), - r.trust.value, - ] + format_datetime_local(r.timestamp)[0], + r.uuid, + humanize.naturalsize( + r.stats.get("bytes_written", 0), binary=True + ), + duration, + ",".join(r.tags), + r.trust.value, + r.location, ) - print(t) + rprint(t) print( "{} revisions containing {} data (estimated)".format( @@ -108,9 +113,6 @@ def forget(self, revision): def scheduler(self, config): backy.daemon.main(config, self.log) - def check(self, config): - backy.daemon.check(config, self.log) - def purge(self): b = backy.backup.Backup(self.path, self.log) b.purge() @@ -131,6 +133,39 @@ def verify(self, revision): b = backy.backup.Backup(self.path, self.log) b.verify(revision) + def client(self, config, server, host, port, token, apifunc, **kwargs): + async def run(): + if host and port and token: + api = APIClient("", host, port, token, self.log) + else: + d = backy.daemon.BackyDaemon(config, self.log) + d._read_config() + api = APIClient.from_conf( + server or "", d.api_servers[server], self.log + ) + async with CLIClient(api, self.log) as c: + await getattr(c, apifunc)(**kwargs) + + asyncio.run(run()) + + def tags(self, action, autoremove, expect, revision, tags, force): + tags = set(t.strip() for t in tags.split(",")) + if expect is not None: + expect = set(t.strip() for t in expect.split(",")) + b = backy.backup.Backup(self.path, self.log) + b.tags( + action, + revision, + tags, + expect=expect, + autoremove=autoremove, + force=force, + ) + + def expire(self): + b = backy.backup.Backup(self.path, self.log) + b.expire() + def setup_argparser(): parser = argparse.ArgumentParser( @@ -148,7 +183,7 @@ def setup_argparser(): default=argparse.SUPPRESS, help=( "file name to write log output in. 
" - "(default: /var/log/backy.log for `scheduler` and `check`, " + "(default: /var/log/backy.log for `scheduler`, " "$backupdir/backy.log otherwise)" ), ) @@ -165,6 +200,64 @@ def setup_argparser(): subparsers = parser.add_subparsers() + # CLIENT + client = subparsers.add_parser( + "client", + help="""\ +Query the api +""", + ) + g = client.add_argument_group() + g.add_argument("-c", "--config", default="/etc/backy.conf") + g.add_argument("-s", "--server", default="") + g = client.add_argument_group() + g.add_argument("--host") + g.add_argument("--port", type=int) + g.add_argument("--token") + client.set_defaults(func="client") + client_parser = client.add_subparsers() + + # CLIENT jobs + p = client_parser.add_parser("jobs", help="List status of all known jobs") + p.add_argument( + "filter_re", + default="", + metavar="[filter]", + nargs="?", + help="Optional job filter regex", + ) + p.set_defaults(apifunc="jobs") + + # CLIENT status + p = client_parser.add_parser("status", help="Show job status overview") + p.set_defaults(apifunc="status") + + # CLIENT run + p = client_parser.add_parser( + "run", help="Trigger immediate run for one job" + ) + p.add_argument("job", metavar="", help="Name of the job to run") + p.set_defaults(apifunc="run") + + # CLIENT runall + p = client_parser.add_parser( + "runall", help="Trigger immediate run for all jobs" + ) + p.set_defaults(apifunc="runall") + + # CLIENT reload + p = client_parser.add_parser("reload", help="Reload the configuration") + p.set_defaults(apifunc="reload") + + # CLIENT check + p = client_parser.add_parser( + "check", + help="""\ +Check whether all jobs adhere to their schedules' SLA. +""", + ) + p.set_defaults(apifunc="check") + # BACKUP p = subparsers.add_parser( "backup", @@ -277,16 +370,6 @@ def setup_argparser(): p.set_defaults(func="scheduler") p.add_argument("-c", "--config", default="/etc/backy.conf") - # SCHEDULE CHECK - p = subparsers.add_parser( - "check", - help="""\ -Check whether all jobs adhere to their schedules' SLA. -""", - ) - p.set_defaults(func="check") - p.add_argument("-c", "--config", default="/etc/backy.conf") - # DISTRUST p = subparsers.add_parser( "distrust", @@ -350,6 +433,55 @@ def setup_argparser(): ) p.set_defaults(func="forget") + # TAGS + p = subparsers.add_parser( + "tags", + help="""\ + Modify tags on revision. 
+ """, + ) + p.add_argument( + "--autoremove", + action="store_true", + help="""\ + Remove revision if no tags remain + """, + ) + p.add_argument( + "-f", "--force", action="store_true", help="Do not validate tags" + ) + p.add_argument( + "--expect", + metavar="", + help="""\ + Do nothing if tags differ from the expected tags + """, + ) + p.add_argument( + "action", + choices=["set", "add", "remove"], + ) + p.add_argument( + "revision", + metavar="SPEC", + help="set tags for revision SPEC", + ) + p.add_argument( + "tags", + metavar="", + help="comma separated list of tags", + ) + p.set_defaults(func="tags") + + # EXPIRE + p = subparsers.add_parser( + "expire", + help="""\ + Expire tags according to schedule + """, + ) + p.set_defaults(func="expire") + return parser @@ -364,10 +496,9 @@ def main(): if not hasattr(args, "logfile"): args.logfile = None - is_daemon = args.func == "scheduler" or args.func == "check" default_logfile = ( Path("/var/log/backy.log") - if is_daemon + if args.func == "scheduler" else args.backupdir / "backy.log" ) @@ -375,7 +506,10 @@ def main(): logging.init_logging( args.verbose, args.logfile or default_logfile, - default_job_name="-" if is_daemon else "", + default_job_name="-" + if args.func == "scheduler" + or (args.func == "client" and args.apifunc == "check") + else "", ) log = structlog.stdlib.get_logger(subsystem="command") log.debug("invoked", args=" ".join(sys.argv)) diff --git a/src/backy/revision.py b/src/backy/revision.py index 6a81c824..9ab9a89b 100644 --- a/src/backy/revision.py +++ b/src/backy/revision.py @@ -37,8 +37,10 @@ class Revision(object): parent: Optional[str] = None stats: dict tags: set[str] + orig_tags: set[str] trust: Trust = Trust.TRUSTED backend_type: str = "chunked" + location: str = "" log: BoundLogger def __init__(self, backup, log, uuid=None, timestamp=None): @@ -47,6 +49,7 @@ def __init__(self, backup, log, uuid=None, timestamp=None): self.timestamp = timestamp if timestamp else now() self.stats = {"bytes_written": 0} self.tags = set() + self.orig_tags = set() self.log = log.bind(revision_uuid=self.uuid, subsystem="revision") @classmethod @@ -69,13 +72,21 @@ def open(self, mode="rb"): def load(cls, filename, backup, log): with open(filename, encoding="utf-8") as f: metadata = yaml.safe_load(f) - assert metadata["timestamp"].tzinfo == datetime.timezone.utc - r = Revision( - backup, log, uuid=metadata["uuid"], timestamp=metadata["timestamp"] - ) + r = cls.from_dict(metadata, backup, log) + return r + + @classmethod + def from_dict(cls, metadata, backup, log): + ts = metadata["timestamp"] + if isinstance(ts, str): + ts = datetime.datetime.fromisoformat(ts) + assert ts.tzinfo == datetime.timezone.utc + r = Revision(backup, log, uuid=metadata["uuid"], timestamp=ts) r.parent = metadata["parent"] r.stats = metadata.get("stats", {}) r.tags = set(metadata.get("tags", [])) + r.orig_tags = set(metadata.get("orig_tags", [])) + r.location = metadata.get("location", "") # Assume trusted by default to support migration r.trust = Trust(metadata.get("trust", Trust.TRUSTED.value)) # If the metadata does not show the backend type, then it's cowfile. 
@@ -98,18 +109,27 @@ def materialize(self): def write_info(self): self.log.debug("writing-info", tags=", ".join(self.tags)) - metadata = { + metadata = self.to_dict() + with SafeFile(self.info_filename, encoding="utf-8") as f: + f.open_new("wb") + yaml.safe_dump(metadata, f) + + def to_dict(self): + return { "uuid": self.uuid, "backend_type": self.backend_type, - "timestamp": self.timestamp, + "timestamp": self.timestamp.isoformat(), "parent": self.parent, "stats": self.stats, "trust": self.trust.value, "tags": list(self.tags), + "orig_tags": list(self.orig_tags), + "location": self.location, } - with SafeFile(self.info_filename, encoding="utf-8") as f: - f.open_new("wb") - yaml.safe_dump(metadata, f) + + @property + def pending_changes(self): + return self.location and self.tags != self.orig_tags def set_link(self, name): path = self.backup.path @@ -124,16 +144,22 @@ def verify(self): self.log.info("verified") self.trust = Trust.VERIFIED - def remove(self): + def remove(self, force=False): self.log.info("remove") - for filename in glob.glob(self.filename + "*"): - if os.path.exists(filename): - self.log.debug("remove-start", filename=filename) - os.remove(filename) - self.log.debug("remove-end", filename=filename) - - if self in self.backup.history: - self.backup.history.remove(self) + if not force and self.location: + self.log.debug("remove-remote", location=self.location) + self.tags = set() + self.materialize() + else: + for filename in glob.glob(self.filename + "*"): + if os.path.exists(filename): + self.log.debug("remove-start", filename=filename) + os.remove(filename) + self.log.debug("remove-end", filename=filename) + + if self in self.backup.history: + self.backup.history.remove(self) + del self.backup._by_uuid[self.uuid] def writable(self): if os.path.exists(self.filename): diff --git a/src/backy/schedule.py b/src/backy/schedule.py index 6eea98d9..368bc0dc 100644 --- a/src/backy/schedule.py +++ b/src/backy/schedule.py @@ -61,6 +61,9 @@ def configure(self, config): for tag, spec in self.schedule.items(): self.schedule[tag]["interval"] = parse_duration(spec["interval"]) + def to_dict(self): + return self.config + def next(self, relative, spread, archive): time, tags = ideal_time, ideal_tags = self._next_ideal(relative, spread) missed_tags = self._missed(archive) diff --git a/src/backy/scheduler.py b/src/backy/scheduler.py index e09b6cfa..272f6a93 100644 --- a/src/backy/scheduler.py +++ b/src/backy/scheduler.py @@ -44,12 +44,12 @@ def __init__(self, daemon, name, log): self.name = name self.log = log.bind(job_name=name, subsystem="job") self.run_immediately = asyncio.Event() + self.path = p.join(self.daemon.base_dir, self.name) + self.logfile = p.join(self.path, "backy.log") def configure(self, config): self.source = config["source"] self.schedule_name = config["schedule"] - self.path = p.join(self.daemon.base_dir, self.name) - self.logfile = p.join(self.path, "backy.log") self.update_config() self.backup = Backup(self.path, self.log) self.last_config = config @@ -113,6 +113,14 @@ def update_config(self): if p.exists(config) and filecmp.cmp(config, f.name): raise ValueError("not changed") + def to_dict(self): + return { + "name": self.name, + "status": self.status, + "source": self.source, + "schedule": self.schedule.to_dict(), + } + async def _wait_for_deadline(self): self.update_status("waiting for deadline") trigger = await time_or_event(self.next_time, self.run_immediately) @@ -182,7 +190,9 @@ async def run_forever(self): self.update_config() await self.run_backup(next_tags) + 
await self.pull_metadata() await self.run_expiry() + await self.push_metadata() await self.run_purge() except asyncio.CancelledError: raise @@ -205,6 +215,12 @@ async def run_forever(self): self.backoff = 0 self.update_status("finished") + async def pull_metadata(self): + await self.backup.pull_metadata(self.daemon.api_servers) + + async def push_metadata(self): + await self.backup.push_metadata(self.daemon.api_servers) + async def run_backup(self, tags): self.log.info("backup-started", tags=", ".join(tags)) proc = await asyncio.create_subprocess_exec( @@ -241,8 +257,38 @@ async def run_backup(self, tags): raise async def run_expiry(self): - self.log.info("expiring-revs") - self.schedule.expire(self.backup) + self.log.info("expiry-started") + proc = await asyncio.create_subprocess_exec( + BACKY_CMD, + "-b", + self.path, + "-l", + self.logfile, + "expire", + close_fds=True, + start_new_session=True, # Avoid signal propagation like Ctrl-C + stdin=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + try: + return_code = await proc.wait() + self.log.info( + "expiry-finished", + return_code=return_code, + subprocess_pid=proc.pid, + ) + if return_code: + raise RuntimeError( + f"Expiry failed with return code {return_code}" + ) + except asyncio.CancelledError: + self.log.warning("expiry-cancelled") + try: + proc.terminate() + except ProcessLookupError: + pass + raise async def run_purge(self): self.log.info("purge-started") diff --git a/src/backy/tests/__init__.py b/src/backy/tests/__init__.py index ac475832..eef3ef5b 100644 --- a/src/backy/tests/__init__.py +++ b/src/backy/tests/__init__.py @@ -100,3 +100,6 @@ def __eq__(self, other): assert isinstance(other, str) report = self.compare(other) return report.is_ok + + def __repr__(self): + return "\n".join(self.patterns) diff --git a/src/backy/tests/test_backup.py b/src/backy/tests/test_backup.py index a9cd1dd6..55f88250 100644 --- a/src/backy/tests/test_backup.py +++ b/src/backy/tests/test_backup.py @@ -1,10 +1,8 @@ import os.path import pytest -import yaml import backy.utils -from backy.backup import Backup from backy.revision import Revision from backy.sources.file import File diff --git a/src/backy/tests/test_backy.py b/src/backy/tests/test_backy.py index df8e19c4..0621821d 100644 --- a/src/backy/tests/test_backy.py +++ b/src/backy/tests/test_backy.py @@ -140,22 +140,31 @@ def test_smoketest_external(): Diffing restore_state2.img against img_state2.img. Success. Restoring img_state1.img from level 3. Done. Diffing restore_state1.img against img_state1.img. Success. -+----------------------+------------------------+-----------+----------+-------------+---------+ -| Date (...) | ID | Size | Duration | Tags | Trust | -+----------------------+------------------------+-----------+----------+-------------+---------+ -| ... | ... | 512.0 KiB | a moment | manual:test | trusted | -| ... | ... | 512.0 KiB | a moment | daily | trusted | -| ... | ... | 512.0 KiB | a moment | test | trusted | -| ... | ... | 512.0 KiB | a moment | manual:test | trusted | -+----------------------+------------------------+-----------+----------+-------------+---------+ +┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━┓ +┃ Date ┃ ┃ ┃ ┃ ┃ ┃ ┃ +┃ ... ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ Location ┃ +┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━┩ +│ ... │ ... │ 512.0 KiB │ a moment │ manual:… │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... 
│ 512.0 KiB │ a moment │ daily │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... │ 512.0 KiB │ a moment │ test │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... │ 512.0 KiB │ a moment │ manual:… │ trusted │ │ +│ ... │ │ │ │ │ │ │ +└───────────┴───────────┴───────────┴──────────┴──────────┴─────────┴──────────┘ 4 revisions containing 2.0 MiB data (estimated) -+----------------------+------------------------+-----------+----------+-------------+---------+ -| Date (...) | ID | Size | Duration | Tags | Trust | -+----------------------+------------------------+-----------+----------+-------------+---------+ -| ... | ... | 512.0 KiB | a moment | manual:test | trusted | -| ... | ... | 512.0 KiB | a moment | test | trusted | -| ... | ... | 512.0 KiB | a moment | manual:test | trusted | -+----------------------+------------------------+-----------+----------+-------------+---------+ +┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━┓ +┃ Date ┃ ┃ ┃ ┃ ┃ ┃ ┃ +┃ ... ┃ ID ┃ Size ┃ Duration ┃ Tags ┃ Trust ┃ Location ┃ +┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━┩ +│ ... │ ... │ 512.0 KiB │ a moment │ manual:… │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... │ 512.0 KiB │ a moment │ test │ trusted │ │ +│ ... │ │ │ │ │ │ │ +│ ... │ ... │ 512.0 KiB │ a moment │ manual:… │ trusted │ │ +│ ... │ │ │ │ │ │ │ +└───────────┴───────────┴───────────┴──────────┴──────────┴─────────┴──────────┘ 3 revisions containing 1.5 MiB data (estimated) """ ) diff --git a/src/backy/tests/test_daemon.py b/src/backy/tests/test_daemon.py index 52412e18..dd537530 100644 --- a/src/backy/tests/test_daemon.py +++ b/src/backy/tests/test_daemon.py @@ -3,19 +3,31 @@ import os import os.path as p import re -import time +import ssl from unittest import mock import pytest +import yaml +from aiohttp import ClientResponseError, hdrs +from aiohttp.test_utils import unused_port +from aiohttp.web_exceptions import HTTPUnauthorized +from yarl import URL from backy import utils +from backy.api import BackyAPI from backy.backends.chunked import ChunkedFileBackend +from backy.client import APIClient, CLIClient from backy.daemon import BackyDaemon from backy.revision import Revision from backy.scheduler import Job from backy.tests import Ellipsis +@pytest.fixture(autouse=True) +def configure_logging(setup_structlog): + setup_structlog.default_job_name = "-" + + @pytest.fixture async def daemon(tmpdir, event_loop, log): daemon = BackyDaemon(str(tmpdir / "config"), log) @@ -23,11 +35,14 @@ async def daemon(tmpdir, event_loop, log): source = str(tmpdir / "test01.source") with open(str(tmpdir / "config"), "w") as f: f.write( - """\ + f"""\ --- global: base-dir: {base_dir} - status-interval: 1 +api: + tokens: + "testtoken": "cli" + "testtoken2": "cli2" schedules: default: daily: @@ -44,9 +59,7 @@ async def daemon(tmpdir, event_loop, log): type: file filename: {source} schedule: default -""".format( - **locals() - ) +""" ) with open(source, "w") as f: @@ -63,7 +76,45 @@ def test_fail_on_nonexistent_config(log): daemon._read_config() -@pytest.mark.asyncio +def test_reload(daemon, tmpdir): + new_base_dir = p.join(tmpdir, "newdir") + os.mkdir(new_base_dir) + with open(str(tmpdir / "config"), "w") as f: + f.write( + f"""\ +--- +global: + base-dir: {new_base_dir} +api: + tokens: + "newtoken": "cli" + "newtoken2": "cli2" +schedules: + default2: + daily: + interval: 24h + keep: 13 +jobs: + test05: + source: + type: file + filename: {daemon.jobs["test01"].source} + schedule: default2 + foo05: + source: + 
+      type: file
+      filename: {daemon.jobs["foo00"].source}
+    schedule: default2
+"""
+        )
+    daemon.reload()
+    assert daemon.base_dir == new_base_dir
+    assert set(daemon.api_tokens) == {"newtoken", "newtoken2"}
+    assert set(daemon.jobs) == {"test05", "foo05"}
+    assert set(daemon.schedules) == {"default2"}
+
+
 async def test_run_backup(daemon, log):
     job = daemon.jobs["test01"]
 
@@ -166,12 +217,8 @@ def test_incomplete_revs_dont_count_for_sla(daemon, clock, tmpdir, log):
     assert False is job.sla
 
 
-def test_status_should_default_to_basedir(daemon, tmpdir):
-    assert str(tmpdir / "status") == daemon.status_file
-
-
-def test_update_status(log):
-    job = Job(mock.Mock(), "asdf", log)
+def test_update_status(daemon, log):
+    job = Job(daemon, "asdf", log)
     assert job.status == ""
     job.update_status("asdf")
     assert job.status == "asdf"
@@ -183,7 +230,6 @@ async def cancel_and_wait(job):
     job._task = None
 
 
-@pytest.mark.asyncio
 async def test_task_generator(daemon, clock, tmpdir, monkeypatch, tz_berlin):
     # This is really just a smoke tests, but it covers the task pool,
     # so hey, better than nothing.
@@ -213,7 +260,6 @@ async def wait_for_job_finished():
     await wait_for_job_finished()
 
 
-@pytest.mark.asyncio
 async def test_task_generator_backoff(
     daemon, clock, tmpdir, monkeypatch, tz_berlin
 ):
@@ -290,6 +337,8 @@ async def wait_for_job_finished():
 exception>\tException ...
 ... W test01 job/backoff backoff=480
 ... I test01 job/waiting next_tags='daily' next_time='2015-09-01 09:14:47'
+... I test01 backup/pull-metadata-start \n\
+... I test01 backup/push-metadata-start \n\
 ... I test01 job/stop \n\
 ... I test01 job/waiting next_tags='daily' next_time='2015-09-02 07:32:51'
 """
@@ -301,15 +350,6 @@ async def wait_for_job_finished():
     assert job.backoff == 0
 
 
-@pytest.mark.asyncio
-async def test_write_status_file(daemon, event_loop):
-    assert p.exists(daemon.status_file)
-    first = os.stat(daemon.status_file)
-    await asyncio.sleep(1.5)
-    second = os.stat(daemon.status_file)
-    assert first.st_mtime < second.st_mtime
-
-
 def test_daemon_status(daemon):
     assert {"test01", "foo00"} == set([s["job"] for s in daemon.status()])
 
@@ -319,114 +359,567 @@ def test_daemon_status_filter_re(daemon):
     assert {"foo00"} == set([s["job"] for s in daemon.status(r)])
 
 
-def test_check_ok(daemon, setup_structlog):
-    setup_structlog.default_job_name = "-"
-    daemon._write_status_file()
+async def test_check_ok(daemon, cli_client):
     utils.log_data = ""
-    try:
-        daemon.check()
-    except SystemExit as exit:
-        assert exit.code == 0
+    await cli_client.check()
     assert (
         Ellipsis(
             """\
-... I - daemon/read-config ...
-... I - daemon/check-within-sla num=2
+... D - api/new-conn path='/status' query='filter='
+... D - api/auth-passed client='cli' path='/status' query='filter='
+... D - api/request-result client='cli' path='/status' query='filter=' response=...
+... I - CLIClient/check-within-sla num=2
 """
         )
         == utils.log_data
     )
 
 
-def test_check_too_old(daemon, tmpdir, clock, log, setup_structlog):
-    setup_structlog.default_job_name = "-"
+async def test_check_too_old(daemon, clock, cli_client, log):
     job = daemon.jobs["test01"]
     revision = Revision(job.backup, log, "1")
     revision.timestamp = utils.now() - datetime.timedelta(hours=48)
     revision.stats["duration"] = 60.0
     revision.materialize()
-    daemon._write_status_file()
     utils.log_data = ""
-    try:
-        daemon.check()
-    except SystemExit as exit:
-        assert exit.code == 2
+    with pytest.raises(RuntimeError, match="SLA Violation"):
+        await cli_client.check()
     assert (
         Ellipsis(
             """\
-... I - daemon/read-config ...
-... C test01 daemon/check-sla-violation last_time='2015-08-30 07:06:47+00:00' sla_overdue=172800.0
-... D - daemon/check-jobs-failed failed_jobs=1
+... D - api/new-conn path='/status' query='filter='
+... D - api/auth-passed client='cli' path='/status' query='filter='
+... D - api/request-result client='cli' path='/status' query='filter=' response=...
+... C test01 CLIClient/check-sla-violation last_time='2015-08-30 07:06:47+00:00' sla_overdue=172800.0
+... D - CLIClient/check-jobs-failed failed_jobs=1
 """
         )
         == utils.log_data
     )
 
 
-def test_check_manual_tags(daemon, setup_structlog, log):
-    setup_structlog.default_job_name = "-"
+async def test_check_manual_tags(daemon, cli_client, log):
     job = daemon.jobs["test01"]
     revision = Revision.create(job.backup, {"manual:test"}, log)
     revision.timestamp = utils.now()
     revision.stats["duration"] = 60.0
     revision.materialize()
-    daemon._write_status_file()
     utils.log_data = ""
-    try:
-        daemon.check()
-    except SystemExit as exit:
-        assert exit.code == 0
+    await cli_client.check()
    assert (
         Ellipsis(
             """\
-... I - daemon/read-config ...
-... I test01 daemon/check-manual-tags manual_tags='manual:test'
-... I - daemon/check-within-sla num=2
+... D - api/new-conn path='/status' query='filter='
+... D - api/auth-passed client='cli' path='/status' query='filter='
+... D - api/request-result client='cli' path='/status' query='filter=' response=...
+... I test01 CLIClient/check-manual-tags manual_tags='manual:test'
+... I - CLIClient/check-within-sla num=2
 """
         )
         == utils.log_data
     )
 
 
-def test_check_no_status_file(daemon, setup_structlog):
-    setup_structlog.default_job_name = "-"
-    os.unlink(daemon.status_file)
+### API
 
-    utils.log_data = ""
-    try:
-        daemon.check()
-    except SystemExit as exit:
-        assert exit.code == 3
+
+@pytest.fixture
+async def api(daemon, log):
+    api = BackyAPI(daemon, log)
+    api.tokens = daemon.api_tokens
+    return api
+
+
+@pytest.mark.parametrize(
+    "token",
+    [
+        None,
+        "",
+        "asdf",
+        "Bearer",
+        "Bearer ",
+        "Bearer a",
+        "Bearer asdf",
+        "Bearer cli",
+    ],
+)
+@pytest.mark.parametrize("endpoint", ["/invalid", "/status"])
+@pytest.mark.parametrize(
+    "method", [hdrs.METH_GET, hdrs.METH_POST, hdrs.METH_TRACE]
+)
+async def test_api_wrong_token(api, token, method, endpoint, aiohttp_client):
+    client = await aiohttp_client(
+        api.app,
+        headers={hdrs.AUTHORIZATION: token} if token is not None else {},
+        raise_for_status=True,
+    )
+    with pytest.raises(ClientResponseError) as e:
+        await client.request(method, endpoint)
+    assert e.value.status == HTTPUnauthorized.status_code
+
+
+@pytest.fixture
+async def api_client(api, aiohttp_client, log):
+    client = await aiohttp_client(
+        api.app,
+        headers={hdrs.AUTHORIZATION: "Bearer testtoken"},
+        raise_for_status=True,
+    )
+    api_client = APIClient("", "localhost", 0, "", log)
+    await api_client.session.close()
+    api_client.session = client
+    return api_client
+
+
+@pytest.fixture
+async def cli_client(api_client, log):
+    return CLIClient(api_client, log)
+
+
+async def test_cli_jobs(cli_client, capsys):
+    await cli_client.jobs()
+    out, err = capsys.readouterr()
     assert (
         Ellipsis(
             """\
-... I - daemon/read-config ...
-... E - daemon/check-no-status-file status_file='...'
+┏━━━━━━━━┳━━━━━┳━━━━━━━━┳━━━━━━━━┳━━━━━━━━┳━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━┓
+┃        ┃     ┃        ┃        ┃ Last   ┃        ┃        ┃ Next    ┃        ┃
+┃        ┃     ┃ SLA    ┃        ┃ Backup ┃ Last   ┃ Last   ┃ Backup  ┃ Next   ┃
+┃ Job    ┃ SLA ┃ overd… ┃ Status ┃ ...    ┃ Tags   ┃ Durat… ┃ ...     ┃ Tags   ┃
+┡━━━━━━━━╇━━━━━╇━━━━━━━━╇━━━━━━━━╇━━━━━━━━╇━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━┩
+│ foo00  │ OK  │ -      │ waiti… │ -      │        │ -      │ ...     │ daily  │
+│        │     │        │ for    │        │        │        │ ...     │        │
+│        │     │        │ deadl… │        │        │        │         │        │
+│ test01 │ OK  │ -      │ waiti… │ -      │        │ -      │ ...     │ daily  │
+│        │     │        │ for    │        │        │        │ ...     │        │
+│        │     │        │ deadl… │        │        │        │         │        │
+└────────┴─────┴────────┴────────┴────────┴────────┴────────┴─────────┴────────┘
+2 jobs shown
 """
         )
-        == utils.log_data
+        == out
     )
+
+    await cli_client.jobs(filter_re="test01")
+    out, err = capsys.readouterr()
+    assert (
+        Ellipsis(
+            """\
+┏━━━━━━━━┳━━━━━┳━━━━━━━━┳━━━━━━━━┳━━━━━━━━┳━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━┓
+┃        ┃     ┃        ┃        ┃ Last   ┃        ┃        ┃ Next    ┃        ┃
+┃        ┃     ┃ SLA    ┃        ┃ Backup ┃ Last   ┃ Last   ┃ Backup  ┃ Next   ┃
+┃ Job    ┃ SLA ┃ overd… ┃ Status ┃ ...    ┃ Tags   ┃ Durat… ┃ ...     ┃ Tags   ┃
+┡━━━━━━━━╇━━━━━╇━━━━━━━━╇━━━━━━━━╇━━━━━━━━╇━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━┩
+│ test01 │ OK  │ -      │ waiti… │ -      │        │ -      │ ...     │ daily  │
+│        │     │        │ for    │        │        │        │ ...     │        │
+│        │     │        │ deadl… │        │        │        │         │        │
+└────────┴─────┴────────┴────────┴────────┴────────┴────────┴─────────┴────────┘
+1 jobs shown
+"""
+        )
+        == out
+    )
+
+    await cli_client.jobs(filter_re="asdf")
+    out, err = capsys.readouterr()
+    assert (
+        """\
+┏━━━━━┳━━━━━┳━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━┓
+┃     ┃     ┃         ┃        ┃ Last    ┃         ┃        ┃ Next    ┃        ┃
+┃     ┃     ┃ SLA     ┃        ┃ Backup  ┃ Last    ┃ Last   ┃ Backup  ┃ Next   ┃
+┃ Job ┃ SLA ┃ overdue ┃ Status ┃ (Europ… ┃ Tags    ┃ Durat… ┃ (Europ… ┃ Tags   ┃
+┡━━━━━╇━━━━━╇━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━┩
+└─────┴─────┴─────────┴────────┴─────────┴─────────┴────────┴─────────┴────────┘
+0 jobs shown
+"""
+        == out
+    )
+
+
+async def test_cli_status(cli_client, capsys):
+    await cli_client.status()
+    out, err = capsys.readouterr()
+    assert (
+        """\
+┏━━━━━━━━━━━━━━━━━━━━━━┳━━━┓
+┃ Status               ┃ # ┃
+┡━━━━━━━━━━━━━━━━━━━━━━╇━━━┩
+│ waiting for deadline │ 2 │
+└──────────────────────┴───┘
+"""
+        == out
+    )
+
+
+async def test_cli_run(daemon, cli_client, capsys, monkeypatch):
+    run = mock.Mock()
+    monkeypatch.setattr(daemon.jobs["test01"].run_immediately, "set", run)
+
+    await cli_client.run("test01")
+
+    run.assert_called_once()
+    out, err = capsys.readouterr()
+    assert (
+        """\
+Triggered immediate run for test01
+"""
+        == out
+    )
+
+
+async def test_cli_runall(daemon, cli_client, capsys, monkeypatch):
+    run1 = mock.Mock()
+    run2 = mock.Mock()
+    monkeypatch.setattr(daemon.jobs["test01"].run_immediately, "set", run1)
+    monkeypatch.setattr(daemon.jobs["foo00"].run_immediately, "set", run2)
+
+    await cli_client.runall()
+
+    run1.assert_called_once()
+    run2.assert_called_once()
+    out, err = capsys.readouterr()
+    assert (
+        """\
+Triggered immediate run for test01
+Triggered immediate run for foo00
+"""
+        == out
+    )
+
+
+async def test_cli_reload(daemon, cli_client, capsys, monkeypatch):
+    reload = mock.Mock()
+    monkeypatch.setattr(daemon, "reload", reload)
+
+    await cli_client.reload()
+
+    reload.assert_called_once()
+    out, err = capsys.readouterr()
+    assert (
+        """\
+Triggering daemon reload.
+Daemon configuration reloaded.
+"""
+        == out
+    )
+
+
+async def test_cli_check(daemon, cli_client, capsys):
+    utils.log_data = ""
+    await cli_client.check()
+
+    out, err = capsys.readouterr()
+    assert "" == out
+    assert (
+        Ellipsis(
+            """\
+... D - api/new-conn path='/status' query='filter='
+... D - api/auth-passed client='cli' path='/status' query='filter='
+... D - api/request-result client='cli' path='/status' query='filter=' response=...
+... I - CLIClient/check-within-sla num=2
+"""
+        )
+        == utils.log_data
+    )
+
+
+async def test_purge_pending(daemon, monkeypatch):
+    run_purge = mock.Mock()
+    monkeypatch.setattr("backy.scheduler.Job.run_purge", run_purge)
+    monkeypatch.setattr(
+        "asyncio.sleep", mock.Mock(side_effect=asyncio.CancelledError())
+    )
+
+    daemon.jobs["test01"].backup.set_purge_pending()
+    del daemon.jobs["test01"]
+
+    with pytest.raises(asyncio.CancelledError):
+        await daemon.purge_pending_backups()
+
+    run_purge.assert_called_once()
+
+
+async def wait_api_ready(daemon):
+    while daemon.reload_api.is_set():
+        await asyncio.sleep(0.1)
+
+
+@pytest.fixture(autouse=True)
+async def use_http(monkeypatch):
+    orig_build_url = URL.build
+
+    def build_http_url(scheme="", **kwargs):
+        return orig_build_url(scheme="http", **kwargs)
+
+    monkeypatch.setattr(URL, "build", build_http_url)
+
+
+async def test_https(daemons, monkeypatch, log):
+    monkeypatch.undo()
+    d = await daemons(2)
+    async with APIClient.from_conf(
+        "server-0", d[1].api_servers["server-0"], log
+    ) as api:
+        with pytest.raises(ssl.SSLError):
+            await api.get_jobs()
+
+
+@pytest.fixture
+async def daemons(tmpdir, event_loop, log, monkeypatch):
+    daemons: list[BackyDaemon] = []  # type: ignore
+
+    async def create_daemons(count):
+        ports = [unused_port() for _ in range(count)]
+        for i in range(count):
+            daemon_dir = tmpdir / f"daemon{i}"
+            os.mkdir(daemon_dir)
+            daemon = BackyDaemon(
+                str(daemon_dir / "config"), log.bind(logger=f"server-{i}")
+            )
+            source = str(daemon_dir / "test01.source")
+            api = {
+                "api": {
+                    "addrs": "localhost",
+                    "port": ports[i],
+                    "tokens": {
+                        f"authtoken-{j}-{i}": f"server-{j}"
+                        for j in range(count)
+                        if i != j
+                    },
+                    "servers": {
+                        f"server-{j}": {
+                            "host": "localhost",
+                            "port": ports[j],
+                            "token": f"authtoken-{i}-{j}",
+                        }
+                        for j in range(count)
+                        if i != j
+                    },
+                }
+            }
+            with open(str(daemon_dir / "config"), "w") as f:
+                f.write(
+                    f"""\
+---
+global:
+  base-dir: {str(daemon_dir)}
+schedules:
+  default:
+    daily:
+      interval: 24h
+      keep: 9
+jobs:
+  test01:
+    source:
+      type: file
+      filename: {source}
+    schedule: default
+  foo00:
+    source:
+      type: file
+      filename: {source}
+    schedule: default
+"""
+                    + yaml.safe_dump(api)
+                )
+
+            with open(source, "w") as f:
+                f.write("I am your father, Luke!")
+
+            def fake_create_task(coro, *args, **kwargs):
+                coro.close()
+                return None
+
+            with monkeypatch.context() as m:
+                m.setattr(event_loop, "create_task", fake_create_task)
+                daemon.start(event_loop)
+                daemon.api_server()
+
+            await wait_api_ready(daemon)
+
+            daemons.append(daemon)
+
+        return daemons
+
+    yield create_daemons
+
+    for d in daemons:
+        d.terminate()
+
+
+def create_rev(backup, log):
+    rev = Revision.create(backup, {"manual:a"}, log)
+    rev.timestamp = utils.now()
+    rev.stats["duration"] = 60.0
+    rev.materialize()
+    backup.scan()
+    return rev
+
+
+async def modify_authtokens(
+    daemons: list[BackyDaemon],
+    src: list[int],
+    dest: list[int],
+    allow: bool,
+    bidirectional=False,
+):
+    for d in dest:
+        for s in src:
+            if allow:
+                daemons[d].api_tokens[f"authtoken-{s}-{d}"] = f"server-{s}"
+            else:
+                daemons[d].api_tokens.pop(f"authtoken-{s}-{d}", None)
+        daemons[d].reload_api.set()
+        await wait_api_ready(daemons[d])
+    if bidirectional:
+        await modify_authtokens(daemons, dest, src, allow)
+
+
+async def test_simple_sync(daemons, log):
+    ds = await daemons(3)
+
+    j0 = ds[0].jobs["test01"]
+    b0 = j0.backup
+    rev0 = create_rev(b0, log)
+
+    j1 = ds[1].jobs["test01"]
+    b1 = j1.backup
+    rev1 = create_rev(b1, log)
+
+    ds[2].api_addrs = ""
+    ds[2].reload_api.set()
+    await wait_api_ready(ds[2])
+
+    assert [r.uuid for r in b0.history] == [rev0.uuid]
+
+    await j0.pull_metadata()
+    b0.scan()
+
+    assert [r.uuid for r in b0.history] == [rev0.uuid]
+
+    del ds[1].jobs["test01"]
+    await j0.pull_metadata()
+    b0.scan()
+
+    assert [r.uuid for r in b0.history] == [rev0.uuid, rev1.uuid]
+    new_rev1 = b0.history[1]
+    assert new_rev1.backup == b0
+    assert new_rev1.timestamp == rev1.timestamp
+    assert new_rev1.parent is None
+    assert new_rev1.backend_type == ""
+    assert new_rev1.stats == rev1.stats
+    assert new_rev1.tags == rev1.tags
+    assert new_rev1.orig_tags == new_rev1.tags
+    assert new_rev1.trust == rev1.trust
+    assert new_rev1.location == "server-1"
+
+    new_rev1.remove()
+    assert [r.uuid for r in b0.history] == [rev0.uuid, rev1.uuid]
+    assert new_rev1.tags == set()
+    assert new_rev1.orig_tags == rev1.tags
+
+    await j0.push_metadata()
+    b0.scan()
+    b1.scan()
+
+    assert [r.uuid for r in b0.history] == [rev0.uuid]
+    assert [r.uuid for r in b1.history] == []
+    assert p.exists(p.join(j1.path, ".purge_pending"))
+
+
+async def test_split_brain(daemons, log):
+    ds = await daemons(4)
+
+    await modify_authtokens(ds, [0, 1], [2, 3], allow=False, bidirectional=True)
+
+    js = [d.jobs["test01"] for d in ds]
+    bs = [j.backup for j in js]
+    revs = [create_rev(b, log) for b in bs]
+
+    for b, r in zip(bs, revs):
+        assert [r.uuid for r in b.history] == [r.uuid]
+
+    for j in js:
+        await j.pull_metadata()
+        j.backup.scan()
+
+    for b, r in zip(bs, revs):
+        assert [r.uuid for r in b.history] == [r.uuid]
+
+    del ds[0].jobs["test01"]
+    del ds[2].jobs["test01"]
+
+    await js[1].pull_metadata()
+    await js[3].pull_metadata()
+
+    bs[1].scan()
+    bs[3].scan()
+
+    assert [r.uuid for r in bs[1].history] == [
+        revs[0].uuid,
+        revs[1].uuid,
+    ]
+    assert [r.uuid for r in bs[3].history] == [
+        revs[2].uuid,
+        revs[3].uuid,
+    ]
+
+    await modify_authtokens(ds, [2, 3], [0], allow=True)
+
+    await js[3].pull_metadata()
+    bs[3].scan()
+
+    assert [r.uuid for r in bs[3].history] == [
+        revs[0].uuid,
+        revs[2].uuid,
+        revs[3].uuid,
+    ]
+
+    bs[1].history[0].tags.add("manual:new1")
+    bs[3].history[0].remove()
+
+    await js[1].push_metadata()
+    await js[3].push_metadata()  # fails
+
+    bs[0].scan()
+    assert bs[0].history[0].tags == {"manual:new1", "manual:a"}
+
+    await js[1].pull_metadata()
+    await js[3].pull_metadata()
+
+    bs[1].scan()
+    bs[3].scan()
+
+    assert [(r.uuid, r.tags) for r in bs[1].history] == [
+        (revs[0].uuid, {"manual:a", "manual:new1"}),
+        (revs[1].uuid, {"manual:a"}),
+    ]
+    assert [(r.uuid, r.tags) for r in bs[3].history] == [
+        (revs[0].uuid, {"manual:a", "manual:new1"}),
+        (revs[2].uuid, {"manual:a"}),
+        (revs[3].uuid, {"manual:a"}),
+    ]
+
+    await modify_authtokens(
+        ds, [0, 1, 2, 3], [0, 1, 2, 3], allow=True, bidirectional=True
+    )
+
+    await js[1].pull_metadata()
+    await js[3].pull_metadata()
+
+    bs[1].scan()
+    bs[3].scan()
+
+    assert [(r.uuid, r.tags) for r in bs[1].history] == [
+        (revs[0].uuid, {"manual:a", "manual:new1"}),
+        (revs[1].uuid, {"manual:a"}),
+        (revs[2].uuid, {"manual:a"}),
+    ]
+    assert [(r.uuid, r.tags) for r in bs[3].history] == [
+        (revs[0].uuid, {"manual:a", "manual:new1"}),
+        (revs[2].uuid, {"manual:a"}),
+        (revs[3].uuid, {"manual:a"}),
+    ]
diff --git a/src/backy/tests/test_main.py b/src/backy/tests/test_main.py
index e2adde78..a76189bd 100644
--- a/src/backy/tests/test_main.py
+++ b/src/backy/tests/test_main.py
@@ -5,6 +5,7 @@
 import pytest
 
 import backy.backup
+import backy.client
 import backy.main
 from backy import utils
 from backy.revision import Revision
@@ -28,8 +29,8 @@ def test_display_usage(capsys, argv):
     assert (
         """\
 usage: pytest [-h] [-v] [-l LOGFILE] [-b BACKUPDIR]
-              {backup,restore,purge,find,status,nbd-server,\
-upgrade,scheduler,check,distrust,verify,forget}
+              {client,backup,restore,purge,find,status,nbd-server,\
+upgrade,scheduler,distrust,verify,forget,tags,expire}
               ...
 """
         == out
@@ -47,8 +48,8 @@ def test_display_help(capsys, argv):
         Ellipsis(
             """\
 usage: pytest [-h] [-v] [-l LOGFILE] [-b BACKUPDIR]
-              {backup,restore,purge,find,status,nbd-server,\
-upgrade,scheduler,check,distrust,verify,forget}
+              {client,backup,restore,purge,find,status,nbd-server,\
+upgrade,scheduler,distrust,verify,forget,tags,expire}
               ...
 
 Backup and restore for block devices.
 
@@ -76,6 +77,10 @@ def print_args(*args, **kw):
     pprint.pprint(kw)
 
 
+async def async_print_args(*args, **kw):
+    print_args(*args, **kw)
+
+
 def test_call_status(capsys, backup, argv, monkeypatch):
     monkeypatch.setattr(backy.main.Command, "status", print_args)
     argv.extend(["-v", "-b", backup.path, "status"])
@@ -182,10 +187,10 @@ def test_call_find(capsys, backup, argv, monkeypatch, tz_berlin):
     assert exit.value.code == 0
 
 
-def test_call_check(capsys, backup, argv, monkeypatch, tmpdir):
-    monkeypatch.setattr(backy.main.Command, "check", print_args)
+def test_call_scheduler(capsys, backup, argv, monkeypatch, tmpdir):
+    monkeypatch.setattr(backy.main.Command, "scheduler", print_args)
     argv.extend(
-        ["-v", "-b", backup.path, "-l", str(tmpdir / "backy.log"), "check"]
+        ["-v", "-b", backup.path, "-l", str(tmpdir / "backy.log"), "scheduler"]
     )
     with pytest.raises(SystemExit) as exit:
         backy.main.main()
@@ -203,8 +208,8 @@ def test_call_check(capsys, backup, argv, monkeypatch, tmpdir):
     assert (
         Ellipsis(
             """\
-... D command/invoked args='... -v -b ... check'
-... D command/parsed func='check' func_args={'config': '/etc/backy.conf'}
+... D command/invoked args='... -v -b ... scheduler'
+... D command/parsed func='scheduler' func_args={'config': '/etc/backy.conf'}
 ... D command/successful \n\
 """
         )
@@ -213,11 +218,45 @@ def test_call_check(capsys, backup, argv, monkeypatch, tmpdir):
     assert exit.value.code == 0
 
 
-def test_call_scheduler(capsys, backup, argv, monkeypatch, tmpdir):
-    monkeypatch.setattr(backy.main.Command, "scheduler", print_args)
-    argv.extend(
-        ["-v", "-b", backup.path, "-l", str(tmpdir / "backy.log"), "scheduler"]
+@pytest.mark.parametrize("action", ["set", "add", "remove"])
+def test_call_tags(capsys, backup, argv, monkeypatch, action):
+    monkeypatch.setattr(backy.main.Command, "tags", print_args)
+    argv.extend(["-v", "-b", backup.path, "tags", action, "last", "manual:a"])
+    with pytest.raises(SystemExit) as exit:
+        backy.main.main()
+    assert exit.value.code == 0
+    out, err = capsys.readouterr()
+    assert (
+        Ellipsis(
+            f"""\
+(<backy.main.Command object at ...>,)
+{{'action': '{action}',
+ 'autoremove': False,
+ 'expect': None,
+ 'force': False,
+ 'revision': 'last',
+ 'tags': 'manual:a'}}
+"""
+        )
+        == out
     )
+    assert (
+        Ellipsis(
+            f"""\
+... D command/invoked args='... -v -b ... tags {action} last manual:a'
+... D command/parsed func='tags' func_args={{'autoremove': False, 'force': False, 'expect': None, \
+'action': '{action}', 'revision': 'last', 'tags': 'manual:a'}}
+... D command/successful \n\
+"""
+        )
+        == utils.log_data
+    )
+    assert exit.value.code == 0
+
+
+def test_call_expire(capsys, backup, argv, monkeypatch):
+    monkeypatch.setattr(backy.main.Command, "expire", print_args)
+    argv.extend(["-v", "-b", backup.path, "expire"])
     with pytest.raises(SystemExit) as exit:
         backy.main.main()
     assert exit.value.code == 0
@@ -226,7 +265,7 @@ def test_call_check(capsys, backup, argv, monkeypatch, tmpdir):
         Ellipsis(
             """\
 (<backy.main.Command object at ...>,)
-{'config': '/etc/backy.conf'}
+{}
 """
         )
         == out
@@ -234,8 +273,69 @@ def test_call_check(capsys, backup, argv, monkeypatch, tmpdir):
     assert (
         Ellipsis(
             """\
-... D command/invoked args='... -v -b ... scheduler'
-... D command/parsed func='scheduler' func_args={'config': '/etc/backy.conf'}
+... D command/invoked args='... -v -b ... expire'
+... D command/parsed func='expire' func_args={}
+... D command/successful \n\
+"""
+        )
+        == utils.log_data
+    )
+    assert exit.value.code == 0
+
+
+@pytest.mark.parametrize(
+    ["action", "args"],
+    [
+        ("jobs", {"filter_re": "test"}),
+        ("status", dict()),
+        ("run", {"job": "test"}),
+        ("runall", dict()),
+        ("reload", dict()),
+        ("check", dict()),
+    ],
+)
+def test_call_client_jobs(
+    capsys, backup, argv, monkeypatch, log, tmpdir, action, args
+):
+    monkeypatch.setattr(backy.client.CLIClient, action, async_print_args)
+    conf = str(tmpdir / "conf")
+    with open(conf, "w") as c:
+        c.write(
+            f"""\
+global:
+  base-dir: {str(tmpdir)}
+api:
+  servers:
+    "":
+      host: "localhost"
+      port: 1234
+      token: "test"
+schedules: {{}}
+jobs: {{}}
+"""
+        )
+
+    argv.extend(["-v", "client", "-c", conf, action, *args.values()])
+    with pytest.raises(SystemExit) as exit:
+        backy.main.main()
+    assert exit.value.code == 0
+    out, err = capsys.readouterr()
+    assert (
+        Ellipsis(
+            f"""\
+(<backy.client.CLIClient object at ...>,)
+{args}
+"""
+        )
+        == out
+    )
+    assert (
+        Ellipsis(
+            f"""\
+... D command/invoked args='... -v client -c ... {action}{" "*bool(args)}{", ".join(args.values())}'
+... D command/parsed func='client' func_args={{'config': '...', 'server': '', 'host': None, \
+'port': None, 'token': None{", "*bool(args)}{str(args)[1:-1]}, 'apifunc': '{action}'}}
+... I daemon/read-config ...
 ... D command/successful \n\
 """
         )
@@ -281,7 +381,7 @@ def do_raise(*args, **kw):
 
 def test_commands_wrapper_status(backup, tmpdir, capsys, clock, tz_berlin, log):
     commands = backy.main.Command(str(tmpdir), log)
-    revision = Revision(backup, log, 1)
+    revision = Revision(backup, log, "1")
     revision.timestamp = backy.utils.now()
     revision.materialize()
 
@@ -291,11 +391,11 @@ def test_commands_wrapper_status(backup, tmpdir, capsys, clock, tz_berlin, log):
     assert err == ""
     assert out == Ellipsis(
         """\
-+----------------------+----+---------+----------+------+---------+
-| Date (...)           | ID | Size    | Duration | Tags | Trust   |
-+----------------------+----+---------+----------+------+---------+
-| ...                  | 1  | 0 Bytes | -        |      | trusted |
-+----------------------+----+---------+----------+------+---------+
+┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━┳━━━━━━━━━┳━━━━━━━━━━┳━━━━━━┳━━━━━━━━━┳━━━━━━━━━━┓
+┃ Date (Europe/Berlin) ┃ ID ┃ Size    ┃ Duration ┃ Tags ┃ Trust   ┃ Location ┃
+┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━╇━━━━━━━━━╇━━━━━━━━━━╇━━━━━━╇━━━━━━━━━╇━━━━━━━━━━┩
+│ 2015-09-01 09:06:47  │ 1  │ 0 Bytes │ -        │      │ trusted │          │
+└──────────────────────┴────┴─────────┴──────────┴──────┴─────────┴──────────┘
 1 revisions containing 0 Bytes data (estimated)
 """
     )
diff --git a/src/backy/tests/test_revision.py b/src/backy/tests/test_revision.py
index 655e5ff7..4d44d5ed 100644
--- a/src/backy/tests/test_revision.py
+++ b/src/backy/tests/test_revision.py
@@ -4,7 +4,7 @@
 
 import yaml
 
-import backy
+import backy.utils
 from backy.revision import Revision
 
 UTC = datetime.timezone.utc
@@ -72,8 +72,10 @@ def test_store_revision_data(backup, clock, log):
         "uuid": "asdf2",
         "stats": {"bytes_written": 0},
         "tags": [],
+        "orig_tags": [],
+        "location": "",
         "trust": "trusted",
-        "timestamp": datetime.datetime(2015, 9, 1, 7, 6, 47, tzinfo=UTC),
+        "timestamp": "2015-09-01T07:06:47+00:00",
     }
 
 
diff --git a/src/backy/tests/test_scheduler.py b/src/backy/tests/test_scheduler.py
index 6fd31954..a7cadcdb 100644
--- a/src/backy/tests/test_scheduler.py
+++ b/src/backy/tests/test_scheduler.py
@@ -7,20 +7,23 @@
 from backy.scheduler import Job
 
 
-@pytest.mark.asyncio
-async def test_wait_for_deadline_no_deadline_fails(log):
+@pytest.fixture
+def daemon(tmpdir):
     daemon = mock.Mock()
+    daemon.base_dir = str(tmpdir)
+    return daemon
+
+
+async def test_wait_for_deadline_no_deadline_fails(daemon, log):
     job = Job(daemon, "dummy", log)
-    # Not having a a deadline set causes this to fail (immediately)
+    # Not having a deadline set causes this to fail (immediately)
     with pytest.raises(TypeError):
         await job._wait_for_deadline()
 
 
-@pytest.mark.asyncio
-async def test_wait_for_deadline(log):
-    daemon = mock.Mock()
+async def test_wait_for_deadline(daemon, log):
     job = Job(daemon, "dummy", log)
-    # Not having a a deadline set causes this to fail.
+    # With a deadline set, this returns once the deadline has passed.
     now = backy.utils.now()
     job.next_time = now + datetime.timedelta(seconds=0.3)
     result = await job._wait_for_deadline()
@@ -28,9 +33,7 @@ def test_wait_for_deadline(log):
     assert backy.utils.now() - now >= datetime.timedelta(seconds=0.3)
 
 
-@pytest.mark.asyncio
-async def test_wait_for_deadline_1000(log):
-    daemon = mock.Mock()
+async def test_wait_for_deadline_1000(daemon, log):
     job = Job(daemon, "dummy", log)
     # Large deadline
     now = backy.utils.now()
diff --git a/src/backy/utils.py b/src/backy/utils.py
index c59a44f6..f79b0851 100644
--- a/src/backy/utils.py
+++ b/src/backy/utils.py
@@ -19,7 +19,7 @@
 from .ext_deps import CP
 from .fallocate import punch_hole
 
-log = structlog.stdlib.get_logger()
+log = structlog.stdlib.get_logger(subsystem="utils")
 
 log_data: str  # for pytest
 
@@ -214,13 +214,13 @@ def safe_symlink(realfile, link):
 
 if hasattr(os, "posix_fadvise"):
     posix_fadvise = os.posix_fadvise
 else:  # pragma: no cover
-    log.warning("safe-symlink-no-posix_fadivse")
     os.POSIX_FADV_RANDOM = None  # type: ignore
     os.POSIX_FADV_SEQUENTIAL = None  # type: ignore
     os.POSIX_FADV_WILLNEED = None  # type: ignore
     os.POSIX_FADV_DONTNEED = None  # type: ignore
 
     def posix_fadvise(*args, **kw):
+        log.debug("posix_fadvise-unavailable")
         return