From b1fd240a07380b6f27fbf494318f03b301a17694 Mon Sep 17 00:00:00 2001 From: Sonny Date: Fri, 19 Aug 2022 10:16:29 -0700 Subject: [PATCH] tools/c7n_mailer - support for gcp (#7538) --- poetry.lock | 302 +++++- tools/c7n_mailer/README.md | 103 +- .../azure_mailer/azure_queue_processor.py | 39 +- tools/c7n_mailer/c7n_mailer/cli.py | 31 +- tools/c7n_mailer/c7n_mailer/email_delivery.py | 29 +- .../c7n_mailer/gcp_mailer/__init__.py | 2 + .../gcp_mailer/gcp_queue_processor.py | 77 ++ .../c7n_mailer/c7n_mailer/gcp_mailer/utils.py | 29 + tools/c7n_mailer/c7n_mailer/ldap_lookup.py | 2 + tools/c7n_mailer/c7n_mailer/slack_delivery.py | 9 +- tools/c7n_mailer/c7n_mailer/smtp_delivery.py | 5 +- .../c7n_mailer/sqs_queue_processor.py | 73 +- tools/c7n_mailer/c7n_mailer/target.py | 74 ++ tools/c7n_mailer/c7n_mailer/utils.py | 19 +- tools/c7n_mailer/c7n_mailer/utils_email.py | 250 ++--- tools/c7n_mailer/poetry.lock | 557 +++++++++- tools/c7n_mailer/pyproject.toml | 4 + tools/c7n_mailer/setup.py | 4 +- tools/c7n_mailer/tests/common.py | 611 ++++++----- tools/c7n_mailer/tests/gcp/credentials.json | 6 + tools/c7n_mailer/tests/test_azure.py | 282 ++--- .../tests/test_azure_mailer_utils.py | 11 +- tools/c7n_mailer/tests/test_datadog.py | 156 +-- tools/c7n_mailer/tests/test_email.py | 304 +++--- tools/c7n_mailer/tests/test_gcp.py | 141 +++ .../c7n_mailer/tests/test_gcp_mailer_utils.py | 39 + tools/c7n_mailer/tests/test_ldap.py | 71 +- tools/c7n_mailer/tests/test_misc.py | 56 +- tools/c7n_mailer/tests/test_schema.py | 23 +- tools/c7n_mailer/tests/test_slack.py | 111 +- tools/c7n_mailer/tests/test_smtp_delivery.py | 129 ++- tools/c7n_mailer/tests/test_sns.py | 9 +- tools/c7n_mailer/tests/test_splunk.py | 981 +++++++----------- tools/c7n_mailer/tests/test_utils.py | 356 +++---- 34 files changed, 2935 insertions(+), 1960 deletions(-) create mode 100644 tools/c7n_mailer/c7n_mailer/gcp_mailer/__init__.py create mode 100644 tools/c7n_mailer/c7n_mailer/gcp_mailer/gcp_queue_processor.py create mode 100644 tools/c7n_mailer/c7n_mailer/gcp_mailer/utils.py create mode 100644 tools/c7n_mailer/c7n_mailer/target.py create mode 100644 tools/c7n_mailer/tests/gcp/credentials.json create mode 100644 tools/c7n_mailer/tests/test_gcp.py create mode 100644 tools/c7n_mailer/tests/test_gcp_mailer_utils.py diff --git a/poetry.lock b/poetry.lock index b5fde0ac0e3..11d7115cca7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -64,14 +64,14 @@ dev = ["build (==0.8.0)", "flake8 (==4.0.1)", "hashin (==0.17.0)", "pip-tools (= [[package]] name = "boto3" -version = "1.24.44" +version = "1.24.55" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.27.44,<1.28.0" +botocore = ">=1.27.55,<1.28.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -80,7 +80,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.27.44" +version = "1.27.55" description = "Low-level, data-driven core of boto 3." 
category = "main" optional = false @@ -92,7 +92,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.13.8)"] +crt = ["awscrt (==0.14.0)"] [[package]] name = "certifi" @@ -255,8 +255,8 @@ python-versions = ">=3.7" zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +testing = ["pytest-mypy (>=0.9.1)", "pytest-black (>=0.3.7)", "pytest-enabler (>=1.3)", "pytest-cov", "pytest-flake8", "pytest-checkdocs (>=2.4)", "pytest (>=6)"] +docs = ["jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "jaraco.packaging (>=9)", "sphinx"] [[package]] name = "iniconfig" @@ -307,7 +307,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "jsonschema" -version = "4.9.0" +version = "4.13.0" description = "An implementation of JSON Schema validation for Python" category = "main" optional = false @@ -322,12 +322,12 @@ pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["webcolors (>=1.11)", "uri-template", "rfc3986-validator (>0.1.0)", "rfc3339-validator", "jsonpointer (>1.13)", "isoduration", "idna", "fqdn"] +format = ["webcolors (>=1.11)", "uri-template", "rfc3987", "rfc3339-validator", "jsonpointer (>1.13)", "isoduration", "idna", "fqdn"] [[package]] name = "keyring" -version = "23.7.0" +version = "23.8.2" description = "Store and access your passwords safely." category = "dev" optional = false @@ -340,8 +340,8 @@ pywin32-ctypes = {version = "<0.1.0 || >0.1.0,<0.1.1 || >0.1.1", markers = "sys_ SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +testing = ["pytest-mypy (>=0.9.1)", "pytest-black (>=0.3.7)", "pytest-enabler (>=1.3)", "pytest-cov", "flake8 (<5)", "pytest-flake8", "pytest-checkdocs (>=2.4)", "pytest (>=6)"] +docs = ["jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "jaraco.packaging (>=9)", "sphinx"] [[package]] name = "mccabe" @@ -486,12 +486,15 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.12.0" +version = "2.13.0" description = "Pygments is a syntax highlighting package written in Python." 
category = "dev" optional = false python-versions = ">=3.6" +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pyparsing" version = "3.0.9" @@ -643,7 +646,7 @@ python-versions = ">=3.6" [[package]] name = "readme-renderer" -version = "35.0" +version = "36.0" description = "readme_renderer is a library for rendering \"readme\" descriptions for Warehouse" category = "dev" optional = false @@ -713,7 +716,7 @@ crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] [[package]] name = "secretstorage" -version = "3.3.2" +version = "3.3.3" description = "Python bindings to FreeDesktop.org Secret Service API" category = "dev" optional = false @@ -812,9 +815,9 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +secure = ["ipaddress", "certifi", "idna (>=2.0.0)", "cryptography (>=1.3.4)", "pyOpenSSL (>=0.14)"] +brotli = ["brotlipy (>=0.6.0)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] [[package]] name = "vcrpy" @@ -868,8 +871,8 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +testing = ["pytest-mypy (>=0.9.1)", "pytest-black (>=0.3.7)", "func-timeout", "jaraco.itertools", "pytest-enabler (>=1.3)", "pytest-cov", "pytest-flake8", "pytest-checkdocs (>=2.4)", "pytest (>=6)"] +docs = ["jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "jaraco.packaging (>=9)", "sphinx"] [metadata] lock-version = "1.1" @@ -881,20 +884,111 @@ argcomplete = [ {file = "argcomplete-2.0.0-py2.py3-none-any.whl", hash = "sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e"}, {file = "argcomplete-2.0.0.tar.gz", hash = "sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20"}, ] -atomicwrites = [] -attrs = [] -aws-xray-sdk = [] -bleach = [] -boto3 = [] -botocore = [] -certifi = [] -cffi = [] -charset-normalizer = [] +atomicwrites = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] +attrs = [ + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, +] +aws-xray-sdk = [ + {file = "aws-xray-sdk-2.10.0.tar.gz", hash = "sha256:9b14924fd0628cf92936055864655354003f0b1acc3e1c3ffde6403d0799dd7a"}, + {file = "aws_xray_sdk-2.10.0-py2.py3-none-any.whl", hash = "sha256:7551e81a796e1a5471ebe84844c40e8edf7c218db33506d046fec61f7495eda4"}, +] +bleach = [ + {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"}, + {file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"}, +] +boto3 = [ + {file = "boto3-1.24.55-py3-none-any.whl", hash = "sha256:4b620f55f3015c516a8f8063b02060a7bb9a763e10de3c0f3ec90102cdfa28db"}, + {file = "boto3-1.24.55.tar.gz", hash = "sha256:9fe6c7c5019671cbea82f02dbaae7e743ec86187443ab5f333ebb3d3bef63dce"}, +] 
+botocore = [ + {file = "botocore-1.27.55-py3-none-any.whl", hash = "sha256:0b4a17e81c17845245c0e7a3fbf83753c7f6a5544b93dcf6e0fcc0f3f2156ab2"}, + {file = "botocore-1.27.55.tar.gz", hash = "sha256:929d6be4bdb33a693e6c8e06383dba76fa628bb72fdb1f9353fd13f5d115dd19"}, +] +certifi = [ + {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, + {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, +] +cffi = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = 
"cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = 
"sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.1.0.tar.gz", hash = "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413"}, + {file = "charset_normalizer-2.1.0-py3-none-any.whl", hash = 
"sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5"}, +] click = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] -colorama = [] +colorama = [ + {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, +] coverage = [ {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, @@ -962,13 +1056,22 @@ flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] -freezegun = [] +freezegun = [ + {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"}, + {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"}, +] idna = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] -importlib-metadata = [] -importlib-resources = [] +importlib-metadata = [ + {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"}, + {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"}, +] +importlib-resources = [ + {file = "importlib_resources-5.9.0-py3-none-any.whl", hash = "sha256:f78a8df21a79bcc30cfd400bdc38f314333de7c0fb619763f6b9dabab8268bb7"}, + {file = "importlib_resources-5.9.0.tar.gz", hash = "sha256:5481e97fb45af8dcf2f798952625591c58fe599d0735d86b10f54de086a61681"}, +] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -977,7 +1080,10 @@ jeepney = [ {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, ] -jmespath = [] +jmespath = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] jsonpatch = [ {file = "jsonpatch-1.32-py2.py3-none-any.whl", hash = "sha256:26ac385719ac9f54df8a2f0827bb8253aa3ea8ab7b3368457bcdb8c14595a397"}, {file = "jsonpatch-1.32.tar.gz", hash = "sha256:b6ddfe6c3db30d81a96aaeceb6baf916094ffa23d7dd5fa2c13e13f8b6e600c2"}, @@ -986,8 +1092,14 @@ jsonpointer = [ {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = 
"sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, ] -jsonschema = [] -keyring = [] +jsonschema = [ + {file = "jsonschema-4.13.0-py3-none-any.whl", hash = "sha256:870a61bb45050b81103faf6a4be00a0a906e06636ffcf0b84f5a2e51faf901ff"}, + {file = "jsonschema-4.13.0.tar.gz", hash = "sha256:3776512df4f53f74e6e28fe35717b5b223c1756875486984a31bc9165e7fc920"}, +] +keyring = [ + {file = "keyring-23.8.2-py3-none-any.whl", hash = "sha256:10d2a8639663fe2090705a00b8c47c687cacdf97598ea9c11456679fa974473a"}, + {file = "keyring-23.8.2.tar.gz", hash = "sha256:0d9973f8891850f1ade5f26aafd06bb16865fbbae3fc56b0defb6a14a2624003"}, +] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, @@ -1065,7 +1177,10 @@ pkginfo = [ {file = "pkginfo-1.8.3-py2.py3-none-any.whl", hash = "sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594"}, {file = "pkginfo-1.8.3.tar.gz", hash = "sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c"}, ] -pkgutil-resolve-name = [] +pkgutil-resolve-name = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] placebo = [ {file = "placebo-0.9.0.tar.gz", hash = "sha256:03157f8527bbc2965b71b88f4a139ef8038618b346787f20d63e3c5da541b047"}, ] @@ -1073,7 +1188,10 @@ pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] -portalocker = [] +portalocker = [ + {file = "portalocker-2.5.1-py2.py3-none-any.whl", hash = "sha256:400bae275366e7b840d4baad0654c6ec5994e07c40c423d78e9e1340279b8352"}, + {file = "portalocker-2.5.1.tar.gz", hash = "sha256:ae8e9cc2660da04bf41fa1a0eef7e300bb5e4a5869adfb1a6d8551632b559b2b"}, +] psutil = [ {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:799759d809c31aab5fe4579e50addf84565e71c1dc9f1c31258f159ff70d3f87"}, {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9272167b5f5fbfe16945be3db475b3ce8d792386907e673a209da686176552af"}, @@ -1125,8 +1243,8 @@ pyflakes = [ {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] pygments = [ - {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, - {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, + {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, + {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, ] pyparsing = [ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, @@ -1167,7 +1285,10 @@ pytest-forked = [ {file = "pytest-forked-1.4.0.tar.gz", hash = 
"sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"}, {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"}, ] -pytest-sugar = [] +pytest-sugar = [ + {file = "pytest-sugar-0.9.5.tar.gz", hash = "sha256:eea78b6f15b635277d3d90280cd386d8feea1cab0f9be75947a626e8b02b477d"}, + {file = "pytest_sugar-0.9.5-py2.py3-none-any.whl", hash = "sha256:3da42de32ce4e1e95b448d61c92804433f5d4058c0a765096991c2e93d5a289f"}, +] pytest-terraform = [ {file = "pytest-terraform-0.6.1.tar.gz", hash = "sha256:a5af30e37a5d0394cda8eb5b4d1ef29346ef3e70de1c4df0337a64d879533c7d"}, {file = "pytest_terraform-0.6.1-py3-none-any.whl", hash = "sha256:e9e47a68abd0d58e0adec59d77791689beaab0c428393085a62a10aefc83c79f"}, @@ -1236,10 +1357,13 @@ pyyaml = [ {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] readme-renderer = [ - {file = "readme_renderer-35.0-py3-none-any.whl", hash = "sha256:73b84905d091c31f36e50b4ae05ae2acead661f6a09a9abb4df7d2ddcdb6a698"}, - {file = "readme_renderer-35.0.tar.gz", hash = "sha256:a727999acfc222fc21d82a12ed48c957c4989785e5865807c65a487d21677497"}, + {file = "readme_renderer-36.0-py3-none-any.whl", hash = "sha256:2c37e472ca96755caba6cc58bcbf673a5574bc033385a2ac91d85dfef2799876"}, + {file = "readme_renderer-36.0.tar.gz", hash = "sha256:f71aeef9a588fcbed1f4cc001ba611370e94a0cd27c75b1140537618ec78f0a2"}, +] +requests = [ + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, ] -requests = [] requests-toolbelt = [ {file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"}, {file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"}, @@ -1253,14 +1377,18 @@ s3transfer = [ {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, ] secretstorage = [ - {file = "SecretStorage-3.3.2-py3-none-any.whl", hash = "sha256:755dc845b6ad76dcbcbc07ea3da75ae54bb1ea529eb72d15f83d26499a5df319"}, - {file = "SecretStorage-3.3.2.tar.gz", hash = "sha256:0a8eb9645b320881c222e827c26f4cfcf55363e8b374a021981ef886657a912f"}, + {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, + {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -tabulate = [] +tabulate = [ + {file = "tabulate-0.8.10-py3-none-any.whl", hash = "sha256:0ba055423dbaa164b9e456abe7920c5e8ed33fcc16f6d1b2f2d152c8e1e8b4fc"}, + {file = "tabulate-0.8.10-py3.8.egg", hash = "sha256:436f1c768b424654fce8597290d2764def1eea6a77cfa5c33be00b1bc0f4f63d"}, + {file = "tabulate-0.8.10.tar.gz", hash = "sha256:6c57f3f3dd7ac2782770155f3adb2db0b1a269637e42f27599925e64b114f519"}, +] termcolor = [ {file = "termcolor-1.1.0.tar.gz", hash = "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"}, ] @@ -1276,9 +1404,18 @@ twine = [ {file = 
"twine-3.8.0-py3-none-any.whl", hash = "sha256:d0550fca9dc19f3d5e8eadfce0c227294df0a2a951251a4385797c8a6198b7c8"}, {file = "twine-3.8.0.tar.gz", hash = "sha256:8efa52658e0ae770686a13b675569328f1fba9837e5de1867bfe5f46a9aefe19"}, ] -typing-extensions = [] -urllib3 = [] -vcrpy = [] +typing-extensions = [ + {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, + {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, +] +urllib3 = [ + {file = "urllib3-1.26.11-py2.py3-none-any.whl", hash = "sha256:c33ccba33c819596124764c23a97d25f32b28433ba0dedeb77d873a38722c9bc"}, + {file = "urllib3-1.26.11.tar.gz", hash = "sha256:ea6e8fb210b19d950fab93b60c9009226c63a28808bc8386e05301e25883ac0a"}, +] +vcrpy = [ + {file = "vcrpy-4.2.0-py2.py3-none-any.whl", hash = "sha256:7ec280c8d5385652f1117fe32a200e6676614007d9f946af9f07df1e5f92254c"}, + {file = "vcrpy-4.2.0.tar.gz", hash = "sha256:94520b86fb765925adc8c77ff934e89a5e156c28e74a314320217ef1b454afe0"}, +] webencodings = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -1349,5 +1486,68 @@ wrapt = [ {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, ] -yarl = [] -zipp = [] +yarl = [ + {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:abc06b97407868ef38f3d172762f4069323de52f2b70d133d096a48d72215d28"}, + {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:07b21e274de4c637f3e3b7104694e53260b5fc10d51fb3ec5fed1da8e0f754e3"}, + {file = "yarl-1.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9de955d98e02fab288c7718662afb33aab64212ecb368c5dc866d9a57bf48880"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ec362167e2c9fd178f82f252b6d97669d7245695dc057ee182118042026da40"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20df6ff4089bc86e4a66e3b1380460f864df3dd9dccaf88d6b3385d24405893b"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5999c4662631cb798496535afbd837a102859568adc67d75d2045e31ec3ac497"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed19b74e81b10b592084a5ad1e70f845f0aacb57577018d31de064e71ffa267a"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e4808f996ca39a6463f45182e2af2fae55e2560be586d447ce8016f389f626f"}, + {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2d800b9c2eaf0684c08be5f50e52bfa2aa920e7163c2ea43f4f431e829b4f0fd"}, + {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6628d750041550c5d9da50bb40b5cf28a2e63b9388bac10fedd4f19236ef4957"}, + {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f5af52738e225fcc526ae64071b7e5342abe03f42e0e8918227b38c9aa711e28"}, + {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:76577f13333b4fe345c3704811ac7509b31499132ff0181f25ee26619de2c843"}, + {file 
= "yarl-1.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c03f456522d1ec815893d85fccb5def01ffaa74c1b16ff30f8aaa03eb21e453"}, + {file = "yarl-1.8.1-cp310-cp310-win32.whl", hash = "sha256:ea30a42dc94d42f2ba4d0f7c0ffb4f4f9baa1b23045910c0c32df9c9902cb272"}, + {file = "yarl-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:9130ddf1ae9978abe63808b6b60a897e41fccb834408cde79522feb37fb72fb0"}, + {file = "yarl-1.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0ab5a138211c1c366404d912824bdcf5545ccba5b3ff52c42c4af4cbdc2c5035"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0fb2cb4204ddb456a8e32381f9a90000429489a25f64e817e6ff94879d432fc"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85cba594433915d5c9a0d14b24cfba0339f57a2fff203a5d4fd070e593307d0b"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca7e596c55bd675432b11320b4eacc62310c2145d6801a1f8e9ad160685a231"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0f77539733e0ec2475ddcd4e26777d08996f8cd55d2aef82ec4d3896687abda"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29e256649f42771829974e742061c3501cc50cf16e63f91ed8d1bf98242e5507"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7fce6cbc6c170ede0221cc8c91b285f7f3c8b9fe28283b51885ff621bbe0f8ee"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:59ddd85a1214862ce7c7c66457f05543b6a275b70a65de366030d56159a979f0"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:12768232751689c1a89b0376a96a32bc7633c08da45ad985d0c49ede691f5c0d"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:b19255dde4b4f4c32e012038f2c169bb72e7f081552bea4641cab4d88bc409dd"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6c8148e0b52bf9535c40c48faebb00cb294ee577ca069d21bd5c48d302a83780"}, + {file = "yarl-1.8.1-cp37-cp37m-win32.whl", hash = "sha256:de839c3a1826a909fdbfe05f6fe2167c4ab033f1133757b5936efe2f84904c07"}, + {file = "yarl-1.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:dd032e8422a52e5a4860e062eb84ac94ea08861d334a4bcaf142a63ce8ad4802"}, + {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:19cd801d6f983918a3f3a39f3a45b553c015c5aac92ccd1fac619bd74beece4a"}, + {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6347f1a58e658b97b0a0d1ff7658a03cb79bdbda0331603bed24dd7054a6dea1"}, + {file = "yarl-1.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c0da7e44d0c9108d8b98469338705e07f4bb7dab96dbd8fa4e91b337db42548"}, + {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5587bba41399854703212b87071c6d8638fa6e61656385875f8c6dff92b2e461"}, + {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31a9a04ecccd6b03e2b0e12e82131f1488dea5555a13a4d32f064e22a6003cfe"}, + {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:205904cffd69ae972a1707a1bd3ea7cded594b1d773a0ce66714edf17833cdae"}, + {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea513a25976d21733bff523e0ca836ef1679630ef4ad22d46987d04b372d57fc"}, + {file = 
"yarl-1.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0b51530877d3ad7a8d47b2fff0c8df3b8f3b8deddf057379ba50b13df2a5eae"}, + {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2b8f245dad9e331540c350285910b20dd913dc86d4ee410c11d48523c4fd546"}, + {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ab2a60d57ca88e1d4ca34a10e9fb4ab2ac5ad315543351de3a612bbb0560bead"}, + {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:449c957ffc6bc2309e1fbe67ab7d2c1efca89d3f4912baeb8ead207bb3cc1cd4"}, + {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a165442348c211b5dea67c0206fc61366212d7082ba8118c8c5c1c853ea4d82e"}, + {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b3ded839a5c5608eec8b6f9ae9a62cb22cd037ea97c627f38ae0841a48f09eae"}, + {file = "yarl-1.8.1-cp38-cp38-win32.whl", hash = "sha256:c1445a0c562ed561d06d8cbc5c8916c6008a31c60bc3655cdd2de1d3bf5174a0"}, + {file = "yarl-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:56c11efb0a89700987d05597b08a1efcd78d74c52febe530126785e1b1a285f4"}, + {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e80ed5a9939ceb6fda42811542f31c8602be336b1fb977bccb012e83da7e4936"}, + {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6afb336e23a793cd3b6476c30f030a0d4c7539cd81649683b5e0c1b0ab0bf350"}, + {file = "yarl-1.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c322cbaa4ed78a8aac89b2174a6df398faf50e5fc12c4c191c40c59d5e28357"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fae37373155f5ef9b403ab48af5136ae9851151f7aacd9926251ab26b953118b"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5395da939ffa959974577eff2cbfc24b004a2fb6c346918f39966a5786874e54"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:076eede537ab978b605f41db79a56cad2e7efeea2aa6e0fa8f05a26c24a034fb"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1a50e461615747dd93c099f297c1994d472b0f4d2db8a64e55b1edf704ec1c"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7de89c8456525650ffa2bb56a3eee6af891e98f498babd43ae307bd42dca98f6"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a88510731cd8d4befaba5fbd734a7dd914de5ab8132a5b3dde0bbd6c9476c64"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2d93a049d29df172f48bcb09acf9226318e712ce67374f893b460b42cc1380ae"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:21ac44b763e0eec15746a3d440f5e09ad2ecc8b5f6dcd3ea8cb4773d6d4703e3"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d0272228fabe78ce00a3365ffffd6f643f57a91043e119c289aaba202f4095b0"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99449cd5366fe4608e7226c6cae80873296dfa0cde45d9b498fefa1de315a09e"}, + {file = "yarl-1.8.1-cp39-cp39-win32.whl", hash = "sha256:8b0af1cf36b93cee99a31a545fe91d08223e64390c5ecc5e94c39511832a4bb6"}, + {file = "yarl-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:de49d77e968de6626ba7ef4472323f9d2e5a56c1d85b7c0e2a190b2173d3b9be"}, + {file = "yarl-1.8.1.tar.gz", hash = "sha256:af887845b8c2e060eb5605ff72b6f2dd2aab7a761379373fd89d314f4752abbf"}, +] +zipp = [ + {file = 
"zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, + {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"}, +] diff --git a/tools/c7n_mailer/README.md b/tools/c7n_mailer/README.md index 723f93f2566..38fbfcb935c 100644 --- a/tools/c7n_mailer/README.md +++ b/tools/c7n_mailer/README.md @@ -84,7 +84,7 @@ The standard way to do a DataDog integration is use the c7n integration with AWS CloudWatch and use the [DataDog integration with AWS](https://docs.datadoghq.com/integrations/amazon_web_services/) to collect CloudWatch metrics. The mailer/messenger integration is only -for the case you don't want or you can't use AWS CloudWatch. +for the case you don't want or you can't use AWS CloudWatch, e.g. in Azure or GCP. Note this integration requires the additional dependency of datadog python bindings: ``` @@ -266,12 +266,14 @@ configuration you specify in a YAML file. Here is [the schema](./c7n_mailer/cli.py#L11-L41) to which the file must conform, and here is a description of the options: -| Required? | Key | Type | Notes | -|:---------:|:----------------|:-----------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| ✅ | `queue_url` | string | the queue to listen to for messages | -| | `from_address` | string | default from address | -| | `endpoint_url` | string | SQS API URL (for use with VPC Endpoints) | -| | `contact_tags` | array of strings | tags that we should look at for address information | +| Required? | Key | Type | Notes | +|:---------:|:----------------|:-----------------|:------------------------------------------------------------------| +| ✅ | `queue_url` | string | the queue to listen to for messages | +| | `from_address` | string | default from address | +| | `endpoint_url` | string | SQS API URL (for use with VPC Endpoints) | +| | `contact_tags` | array of strings | tags that we should look at for address information | +| | `email_base_url`| string | Base URL to construct a valid email address from a resource owner | + ### Standard Lambda Function Config @@ -291,9 +293,9 @@ and here is a description of the options: | Required? 
| Key | Type | Notes | |:---------:|:----------------------|:-------|:---------------------------------------------------------------------------------------| | | `function_properties` | object | Contains `appInsights`, `storageAccount` and `servicePlan` objects | -| | `appInsights` | object | Contains `name`, `location` and `resourceGroupName` properties | -| | `storageAccount` | object | Contains `name`, `location` and `resourceGroupName` properties | -| | `servicePlan` | object | Contains `name`, `location`, `resourceGroupName`, `skuTier` and `skuName` properties | +| | `appInsights` | object | Contains `name`, `location` and `resourceGroupName` properties | +| | `storageAccount` | object | Contains `name`, `location` and `resourceGroupName` properties | +| | `servicePlan` | object | Contains `name`, `location`, `resourceGroupName`, `skuTier` and `skuName` properties | | | `name` | string | | | | `location` | string | Default: `west us 2` | | | `resourceGroupName` | string | Default `cloud-custodian` | @@ -312,7 +314,7 @@ and here is a description of the options: | | `debug` | boolean | debug on/off | | | `ldap_bind_dn` | string | eg: ou=people,dc=example,dc=com | | | `ldap_bind_user` | string | eg: FOO\\BAR | -| | `ldap_bind_password` | string | ldap bind password | +| | `ldap_bind_password` | secured string | ldap bind password | | | `ldap_bind_password_in_kms` | boolean | defaults to true, most people (except capone) want to set this to false. If set to true, make sure `ldap_bind_password` contains your KMS encrypted ldap bind password as a base64-encoded string. | | | `ldap_email_attribute` | string | | | | `ldap_email_key` | string | eg 'mail' | @@ -410,6 +412,27 @@ You can store your secrets in Azure Key Vault secrets and reference them from th Note: `secrets.get` permission on the KeyVault for the Service Principal is required. +#### GCP + +You can store your secrets as GCP Secret Manager secrets and reference them from the policy. + +```yaml + plaintext_secret: + secured_string: + type: gcp.secretmanager + secret: projects/12345678912/secrets/your-secret +``` + +An example of an SMTP password set as a secured string: + +```yaml + smtp_password: + type: gcp.secretmanager + secret: projects/59808015552/secrets/smtp_pw +``` + +Note: If you do not specify a version, `/versions/latest` will be appended to your secret location. + ## Configuring a policy to send email Outbound email can be added to any policy by including the `notify` action. @@ -601,6 +624,64 @@ function_properties: type: SystemAssigned ``` +## Using on GCP + +Requires: + +- `c7n_gcp` package. See [GCP Getting Started](https://cloudcustodian.io/docs/gcp/gettingstarted.html) +- `google-cloud-secret-manager` package, for pulling in secured string values. +- A working SMTP Account. +- [GCP Pubsub Subscription](https://cloud.google.com/pubsub/docs/) + +The mailer supports GCP Pubsub transports and SMTP/Email delivery, as well as Datadog and Splunk. +Configuration for this scenario requires only minor changes from AWS deployments. + +The notify action in your policy will reflect transport type `projects` with the URL +to a GCP Pub/Sub Topic. 
For example: + +```yaml +policies: + - name: gcp-notify + resource: gcp.compute + description: example policy + actions: + - type: notify + template: default + priority_header: '2' + subject: Hello from C7N Mailer + to: + - you@youremail.com + transport: + type: pubsub + topic: projects/myproject/topics/mytopic +``` + +In your mailer configuration, you'll need to provide your SMTP account information +as well as your topic subscription path in the queue_url variable. Please note that the +subscription you specify should be subscribed to the topic you assign in your policies' +notify action for GCP resources. + +```yaml +queue_url: projects/myproject/subscriptions/mysubscription +from_address: you@youremail.com +# c7n-mailer currently requires a role be present, even if it's empty +role: "" + +smtp_server: my.smtp.add.ress +smtp_port: 25 +smtp_ssl: true +smtp_username: smtpuser +smtp_password: + type: gcp.secretmanager + secret: projects/12345678912/secrets/smtppassword +``` + +The mailer will transmit all messages found on the queue on each execution using SMTP/Email delivery. + +### Deploying GCP Functions + +GCP Cloud Functions for c7n-mailer are currently not supported. + ## Writing an email template Templates are authored in [jinja2](http://jinja.pocoo.org/docs/dev/templates/). diff --git a/tools/c7n_mailer/c7n_mailer/azure_mailer/azure_queue_processor.py b/tools/c7n_mailer/c7n_mailer/azure_mailer/azure_queue_processor.py index aed5ff09360..6c0faa46014 100644 --- a/tools/c7n_mailer/c7n_mailer/azure_mailer/azure_queue_processor.py +++ b/tools/c7n_mailer/c7n_mailer/azure_mailer/azure_queue_processor.py @@ -11,6 +11,7 @@ from c7n_mailer.azure_mailer.sendgrid_delivery import SendGridDelivery from c7n_mailer.smtp_delivery import SmtpDelivery +from c7n_mailer.target import MessageTargetMixin try: from c7n_azure.storage_utils import StorageUtilities @@ -21,7 +22,7 @@ pass -class MailerAzureQueueProcessor: +class MailerAzureQueueProcessor(MessageTargetMixin): def __init__(self, config, logger, session=None, max_num_processes=16): if StorageUtilities is None: @@ -48,7 +49,9 @@ def run(self, parallel=False): for queue_message in queue_messages: self.logger.debug("Message id: %s received" % queue_message.id) - if (self.process_azure_queue_message(queue_message) or + if ( + self.process_azure_queue_message( + queue_message, str(queue_message.inserted_on)) or queue_message.dequeue_count > self.max_message_retry): # If message handled successfully or max retry hit, delete StorageUtilities.delete_queue_message(*queue_settings, message=queue_message) @@ -58,7 +61,7 @@ def run(self, parallel=False): self.logger.info('No messages left on the azure storage queue, exiting c7n_mailer.') - def process_azure_queue_message(self, encoded_azure_queue_message): + def process_azure_queue_message(self, encoded_azure_queue_message, timestamp): queue_message = json.loads( zlib.decompress(base64.b64decode(encoded_azure_queue_message.content))) @@ -70,12 +73,7 @@ def process_azure_queue_message(self, encoded_azure_queue_message): queue_message['policy']['name'], ', '.join(queue_message['action'].get('to', [])))) - if any(e.startswith('slack') or e.startswith('https://hooks.slack.com/') - for e in queue_message.get('action', ()).get('to', [])): - self._deliver_slack_message(queue_message) - - if any(e.startswith('datadog') for e in queue_message.get('action', ()).get('to', [])): - self._deliver_datadog_message(queue_message) + self.handle_targets(queue_message, timestamp, email_delivery=False, sns_delivery=False) 
email_result = self._deliver_email(queue_message) @@ -84,29 +82,6 @@ def process_azure_queue_message(self, encoded_azure_queue_message): else: return True - def _deliver_slack_message(self, queue_message): - from c7n_mailer.slack_delivery import SlackDelivery - slack_delivery = SlackDelivery(self.config, - self.logger, - SendGridDelivery(self.config, self.session, self.logger)) - slack_messages = slack_delivery.get_to_addrs_slack_messages_map(queue_message) - try: - self.logger.info('Sending message to Slack.') - slack_delivery.slack_handler(queue_message, slack_messages) - except Exception as error: - self.logger.exception(error) - - def _deliver_datadog_message(self, queue_message): - from c7n_mailer.datadog_delivery import DataDogDelivery - datadog_delivery = DataDogDelivery(self.config, self.session, self.logger) - datadog_message_packages = datadog_delivery.get_datadog_message_packages(queue_message) - - try: - self.logger.info('Sending message to Datadog.') - datadog_delivery.deliver_datadog_messages(datadog_message_packages, queue_message) - except Exception as error: - self.logger.exception(error) - def _deliver_email(self, queue_message): try: sendgrid_delivery = SendGridDelivery(self.config, self.session, self.logger) diff --git a/tools/c7n_mailer/c7n_mailer/cli.py b/tools/c7n_mailer/c7n_mailer/cli.py index d1d1156718d..71a1568a3d4 100755 --- a/tools/c7n_mailer/c7n_mailer/cli.py +++ b/tools/c7n_mailer/c7n_mailer/cli.py @@ -9,9 +9,11 @@ import jsonschema import yaml from c7n_mailer import deploy, utils +from c7n_mailer.sqs_queue_processor import MailerSqsQueueProcessor from c7n_mailer.azure_mailer.azure_queue_processor import MailerAzureQueueProcessor +from c7n_mailer.gcp_mailer.gcp_queue_processor import MailerGcpQueueProcessor from c7n_mailer.azure_mailer import deploy as azure_deploy -from c7n_mailer.sqs_queue_processor import MailerSqsQueueProcessor +# from c7n_mailer.gcp_mailer import deploy as gcp_deploy from c7n_mailer.utils import get_provider, Providers AZURE_KV_SECRET_SCHEMA = { @@ -24,10 +26,21 @@ 'additionalProperties': False } +GCP_SECRET_SCHEMA = { + 'type': 'object', + 'properties': { + 'type': {'enum': ['gcp.secretmanager']}, + 'secret': {'type': 'string'} + }, + 'required': ['type', 'secret'], + 'additionalProperties': False +} + SECURED_STRING_SCHEMA = { 'oneOf': [ {'type': 'string'}, - AZURE_KV_SECRET_SCHEMA + AZURE_KV_SECRET_SCHEMA, + GCP_SECRET_SCHEMA ] } @@ -116,9 +129,11 @@ ] }, }, - 'function_schedule': {'type': 'string'}, - 'function_skuCode': {'type': 'string'}, - 'function_sku': {'type': 'string'}, + # GCP Cloud Function Config # TODO: + # 'function_schedule': {'type': 'string'}, + # 'function_skuCode': {'type': 'string'}, + # 'function_sku': {'type': 'string'}, + 'email_base_url': {'type': 'string'}, # Mailer Infrastructure Config 'cache_engine': {'type': 'string'}, @@ -138,7 +153,7 @@ 'ldap_manager_attribute': {'type': 'string'}, 'ldap_email_attribute': {'type': 'string'}, 'ldap_bind_password_in_kms': {'type': 'boolean'}, - 'ldap_bind_password': {'type': 'string'}, + 'ldap_bind_password': SECURED_STRING_SCHEMA, 'cross_accounts': {'type': 'object'}, 'ses_region': {'type': 'string'}, 'redis_host': {'type': 'string'}, @@ -251,6 +266,8 @@ def main(): if provider == Providers.Azure: azure_deploy.provision(mailer_config) + # elif provider == Providers.GCP: # TODO: + # gcp_deploy.provision(mailer_config) elif provider == Providers.AWS: deploy.provision(mailer_config, functools.partial(session_factory, mailer_config)) @@ -260,6 +277,8 @@ def main(): # Select 
correct processor if provider == Providers.Azure: processor = MailerAzureQueueProcessor(mailer_config, logger) + elif provider == Providers.GCP: + processor = MailerGcpQueueProcessor(mailer_config, logger) elif provider == Providers.AWS: aws_session = session_factory(mailer_config) processor = MailerSqsQueueProcessor(mailer_config, aws_session, logger) diff --git a/tools/c7n_mailer/c7n_mailer/email_delivery.py b/tools/c7n_mailer/c7n_mailer/email_delivery.py index 5cc1abb9980..e25ff7d1a97 100644 --- a/tools/c7n_mailer/c7n_mailer/email_delivery.py +++ b/tools/c7n_mailer/c7n_mailer/email_delivery.py @@ -3,13 +3,13 @@ from itertools import chain from c7n_mailer.smtp_delivery import SmtpDelivery -from c7n_mailer.utils_email import is_email, get_mimetext_message import c7n_mailer.azure_mailer.sendgrid_delivery as sendgrid from .ldap_lookup import LdapLookup from .utils import ( - get_resource_tag_targets, - kms_decrypt, get_aws_username_from_event) + decrypt, get_resource_tag_targets, get_provider, + get_aws_username_from_event, Providers) +from .utils_email import get_mimetext_message, is_email class EmailDelivery: @@ -18,25 +18,38 @@ def __init__(self, config, session, logger): self.config = config self.logger = logger self.session = session - self.aws_ses = session.client('ses', region_name=config.get('ses_region')) + self.provider = get_provider(self.config) + if self.provider == Providers.AWS: + self.aws_ses = session.client('ses', region_name=config.get('ses_region')) self.ldap_lookup = self.get_ldap_connection() + self.provider = get_provider(self.config) def get_ldap_connection(self): if self.config.get('ldap_uri'): - self.config['ldap_bind_password'] = kms_decrypt(self.config, self.logger, - self.session, 'ldap_bind_password') + credential = decrypt( + self.config, self.logger, self.session, 'ldap_bind_password') + self.config['ldap_bind_password'] = credential return LdapLookup(self.config, self.logger) return None def get_valid_emails_from_list(self, targets): emails = [] for target in targets: + if target in ('resource-owner', 'event-owner'): + continue for email in target.split(':'): if is_email(target): emails.append(target) + # gcp doesn't support the '@' character in their label values so we + # allow users to specify an email_base_url to append to the end of their + # owner contact tags + if not is_email(target) and self.config.get('email_base_url'): + target = "%s@%s" % (target, self.config['email_base_url']) + if is_email(target): + emails.append(target) return emails - def get_event_owner_email(self, targets, event): + def get_event_owner_email(self, targets, event): # TODO: GCP-friendly if 'event-owner' in targets: aws_username = get_aws_username_from_event(self.logger, event) if aws_username: @@ -108,7 +121,7 @@ def get_resource_owner_emails_from_resource(self, sqs_message, resource): return list(chain(explicit_emails, ldap_emails, org_emails)) - def get_account_emails(self, sqs_message): + def get_account_emails(self, sqs_message): # TODO: GCP-friendly email_list = [] if 'account-emails' not in sqs_message['action'].get('to', []): diff --git a/tools/c7n_mailer/c7n_mailer/gcp_mailer/__init__.py b/tools/c7n_mailer/c7n_mailer/gcp_mailer/__init__.py new file mode 100644 index 00000000000..ff2d798899d --- /dev/null +++ b/tools/c7n_mailer/c7n_mailer/gcp_mailer/__init__.py @@ -0,0 +1,2 @@ +# Copyright The Cloud Custodian Authors. 
+# SPDX-License-Identifier: Apache-2.0 diff --git a/tools/c7n_mailer/c7n_mailer/gcp_mailer/gcp_queue_processor.py b/tools/c7n_mailer/c7n_mailer/gcp_mailer/gcp_queue_processor.py new file mode 100644 index 00000000000..bd60d139b2f --- /dev/null +++ b/tools/c7n_mailer/c7n_mailer/gcp_mailer/gcp_queue_processor.py @@ -0,0 +1,77 @@ +# Copyright The Cloud Custodian Authors. +# SPDX-License-Identifier: Apache-2.0 +""" +Google Queue Message Processing +============================== + +""" +import base64 +import json +import zlib + +from c7n_mailer.target import MessageTargetMixin + +try: + from c7n_gcp.client import Session +except ImportError: + raise Exception("Using GCP Pub/Sub with c7n_mailer requires package c7n_gcp to be installed.") + +MAX_MESSAGES = 1000 + + +class MailerGcpQueueProcessor(MessageTargetMixin): + def __init__(self, config, logger, session=None): + self.config = config + self.logger = logger + self.subscription = self.config["queue_url"] + self.session = session or Session() + self.client = self.session.client("pubsub", "v1", "projects.subscriptions") + + def run(self): + self.logger.info("Downloading messages from the GCP PubSub Subscription.") + + # Get first set of messages to process + messages = self.receive_messages() + + while messages and len(messages['receivedMessages']) > 0: + # Discard_date is the timestamp of the last published message in the messages list + # and will be the date we need to seek to when we ack_messages + discard_date = messages["receivedMessages"][-1]["message"]["publishTime"] + + # Process received messages + for message in messages["receivedMessages"]: + self.process_message(message, discard_date) + + # Acknowledge and purge processed messages then get next set of messages + self.ack_messages(discard_date) + messages = self.receive_messages() + + self.logger.info("No messages left in the gcp topic subscription, now exiting c7n_mailer.") + + # This function, when processing gcp pubsub messages, will deliver messages over email. + # Also support for Datadog and Slack + def process_message(self, encoded_gcp_pubsub_message, publish_date): + pubsub_message = self.unpack_to_dict(encoded_gcp_pubsub_message["message"]["data"]) + self.handle_targets(pubsub_message, publish_date, email_delivery=True, sns_delivery=False) + return True + + def receive_messages(self): + """Receive messsage(s) from subscribed topic""" + return self.client.execute_command( + "pull", + { + "subscription": self.subscription, + "body": {"returnImmediately": True, "max_messages": MAX_MESSAGES}, + }, + ) + + def ack_messages(self, discard_datetime): + """Acknowledge and Discard messages up to datetime using seek api command""" + return self.client.execute_command( + "seek", {"subscription": self.subscription, "body": {"time": discard_datetime}} + ) + + @staticmethod + def unpack_to_dict(encoded_gcp_pubsub_message): + """Returns a message as a dict that been base64 decoded""" + return json.loads(zlib.decompress(base64.b64decode(encoded_gcp_pubsub_message))) diff --git a/tools/c7n_mailer/c7n_mailer/gcp_mailer/utils.py b/tools/c7n_mailer/c7n_mailer/gcp_mailer/utils.py new file mode 100644 index 00000000000..9b490a9a094 --- /dev/null +++ b/tools/c7n_mailer/c7n_mailer/gcp_mailer/utils.py @@ -0,0 +1,29 @@ +# Copyright The Cloud Custodian Authors. 
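For orientation, a small sketch (not part of the patch) of the message encoding unpack_to_dict() expects and of how the processor would be wired up; the project, subscription, and address below are hypothetical, and the import itself requires c7n_gcp to be installed:

    import base64
    import json
    import zlib

    from c7n_mailer.gcp_mailer.gcp_queue_processor import MailerGcpQueueProcessor

    # Messages on the subscription are JSON that has been zlib-compressed and
    # base64-encoded; unpack_to_dict() reverses exactly that:
    sample = {"policy": {"name": "demo-policy"}, "resources": []}
    packed = base64.b64encode(zlib.compress(json.dumps(sample).encode("utf8")))
    assert MailerGcpQueueProcessor.unpack_to_dict(packed) == sample

    # With usable GCP credentials, the processor can then drain a subscription:
    #
    #   import logging
    #   config = {
    #       "queue_url": "projects/my-project/subscriptions/custodian-mailer",
    #       "from_address": "custodian@example.com",
    #       "templates_folders": [],
    #   }
    #   MailerGcpQueueProcessor(config, logging.getLogger("c7n_mailer")).run()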
+# SPDX-License-Identifier: Apache-2.0 + +from google.cloud import secretmanager + + +CACHE = {} + + +def gcp_decrypt(config, logger, encrypted_field, client=None): + global CACHE + if client is None: + client = secretmanager.SecretManagerServiceClient() + data = config[encrypted_field] + if isinstance(data, dict): + if "versions" not in data["secret"]: + secret = f"{data['secret']}/versions/latest" + else: + secret = data["secret"] + if secret in CACHE: + logger.debug(f"Using cached value for {secret}") + return CACHE[secret] + logger.debug(f'Accessing {data["secret"]}') + secret_value = client.access_secret_version(name=secret).payload.data.decode("UTF-8") + if secret not in CACHE: + CACHE[secret] = secret_value + return secret_value + + return data diff --git a/tools/c7n_mailer/c7n_mailer/ldap_lookup.py b/tools/c7n_mailer/c7n_mailer/ldap_lookup.py index 606eb744f9e..a151b79f0f1 100644 --- a/tools/c7n_mailer/c7n_mailer/ldap_lookup.py +++ b/tools/c7n_mailer/c7n_mailer/ldap_lookup.py @@ -55,6 +55,8 @@ def get_connection(self, ldap_uri, ldap_bind_user, ldap_bind_password): ) except LDAPSocketOpenError: self.log.error('Not able to establish a connection with LDAP.') + except Exception as e: + self.log.warning(f'Error occurred getting LDAP connection: {e}') def search_ldap(self, base_dn, ldap_filter, attributes): self.connection.search(base_dn, ldap_filter, attributes=self.attributes) diff --git a/tools/c7n_mailer/c7n_mailer/slack_delivery.py b/tools/c7n_mailer/c7n_mailer/slack_delivery.py index f33d1c47520..61d74c42580 100644 --- a/tools/c7n_mailer/c7n_mailer/slack_delivery.py +++ b/tools/c7n_mailer/c7n_mailer/slack_delivery.py @@ -1,5 +1,6 @@ # Copyright The Cloud Custodian Authors. # SPDX-License-Identifier: Apache-2.0 +import copy import time import requests @@ -24,9 +25,7 @@ def cache_factory(self, config, type): return None def get_to_addrs_slack_messages_map(self, sqs_message): - resource_list = [] - for resource in sqs_message['resources']: - resource_list.append(resource) + resource_list = copy.deepcopy(sqs_message['resources']) slack_messages = {} @@ -78,9 +77,9 @@ def get_to_addrs_slack_messages_map(self, sqs_message): resource_list, self.logger, 'slack_template', 'slack_default', self.config['templates_folders']) - elif target.startswith('slack://tag/') and 'Tags' in resource: + elif target.startswith('slack://tag/') and 'Tags' in resource_list[0]: tag_name = target.split('tag/', 1)[1] - result = next((item for item in resource.get('Tags', []) + result = next((item for item in resource_list[0].get('Tags', []) if item["Key"] == tag_name), None) if not result: self.logger.debug( diff --git a/tools/c7n_mailer/c7n_mailer/smtp_delivery.py b/tools/c7n_mailer/c7n_mailer/smtp_delivery.py index 1c5cb9124ee..5f369e83974 100644 --- a/tools/c7n_mailer/c7n_mailer/smtp_delivery.py +++ b/tools/c7n_mailer/c7n_mailer/smtp_delivery.py @@ -26,7 +26,10 @@ def __init__(self, config, session, logger): self._smtp_connection = smtp_connection def __del__(self): - self._smtp_connection.quit() + try: + self._smtp_connection.quit() + except smtplib.SMTPServerDisconnected: + pass def send_message(self, message, to_addrs): self._smtp_connection.sendmail(message['From'], to_addrs, message.as_string()) diff --git a/tools/c7n_mailer/c7n_mailer/sqs_queue_processor.py b/tools/c7n_mailer/c7n_mailer/sqs_queue_processor.py index 573b3492bb6..2e9f1ea5486 100644 --- a/tools/c7n_mailer/c7n_mailer/sqs_queue_processor.py +++ b/tools/c7n_mailer/c7n_mailer/sqs_queue_processor.py @@ -8,13 +8,9 @@ import base64 import json 
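A short sketch (not from the patch) of how a secured string resolves through gcp_decrypt(): dict values matching the gcp.secretmanager schema are fetched from Secret Manager, while plain strings are returned as-is. The secret path below is hypothetical and must already exist, and the client needs GCP credentials.

    import logging

    from c7n_mailer.gcp_mailer.utils import gcp_decrypt

    logger = logging.getLogger("c7n_mailer")
    config = {
        "slack_token": {
            "type": "gcp.secretmanager",
            # hypothetical secret; "/versions/latest" is appended automatically
            "secret": "projects/my-project/secrets/slack-token",
        },
        "from_address": "custodian@example.com",
    }
    token = gcp_decrypt(config, logger, "slack_token")      # fetched and cached
    sender = gcp_decrypt(config, logger, "from_address")    # plain string, unchanged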
import logging -import traceback import zlib -from .email_delivery import EmailDelivery -from .sns_delivery import SnsDelivery - -from c7n_mailer.utils import kms_decrypt +from c7n_mailer.target import MessageTargetMixin DATA_MESSAGE = "maidmsg/1.0" @@ -62,7 +58,7 @@ def ack(self, m): ReceiptHandle=m['ReceiptHandle']) -class MailerSqsQueueProcessor: +class MailerSqsQueueProcessor(MessageTargetMixin): def __init__(self, config, session, logger, max_num_processes=16): self.config = config @@ -144,62 +140,9 @@ def process_sqs_message(self, encoded_sqs_message): sqs_message['policy']['name'], ', '.join(sqs_message['action'].get('to', [])))) - # get the map of email_to_addresses to mimetext messages (with resources baked in) - # and send any emails (to SES or SMTP) if there are email addresses found - email_delivery = EmailDelivery(self.config, self.session, self.logger) - to_addrs_to_email_messages_map = email_delivery.get_to_addrs_email_messages_map(sqs_message) - for email_to_addrs, mimetext_msg in to_addrs_to_email_messages_map.items(): - email_delivery.send_c7n_email(sqs_message, list(email_to_addrs), mimetext_msg) - - # this sections gets the map of sns_to_addresses to rendered_jinja messages - # (with resources baked in) and delivers the message to each sns topic - sns_delivery = SnsDelivery(self.config, self.session, self.logger) - sns_message_packages = sns_delivery.get_sns_message_packages(sqs_message) - sns_delivery.deliver_sns_messages(sns_message_packages, sqs_message) - - # this section sends a notification to the resource owner via Slack - if any(e.startswith('slack') or e.startswith('https://hooks.slack.com/') - for e in sqs_message.get('action', {}).get('to', []) + - sqs_message.get('action', {}).get('owner_absent_contact', [])): - from .slack_delivery import SlackDelivery - - if self.config.get('slack_token'): - self.config['slack_token'] = \ - kms_decrypt(self.config, self.logger, self.session, 'slack_token') - - slack_delivery = SlackDelivery(self.config, self.logger, email_delivery) - slack_messages = slack_delivery.get_to_addrs_slack_messages_map(sqs_message) - try: - slack_delivery.slack_handler(sqs_message, slack_messages) - except Exception: - traceback.print_exc() - pass - - # this section gets the map of metrics to send to datadog and delivers it - if any(e.startswith('datadog') for e in sqs_message.get('action', ()).get('to', [])): - from .datadog_delivery import DataDogDelivery - datadog_delivery = DataDogDelivery(self.config, self.session, self.logger) - datadog_message_packages = datadog_delivery.get_datadog_message_packages(sqs_message) - - try: - datadog_delivery.deliver_datadog_messages(datadog_message_packages, sqs_message) - except Exception: - traceback.print_exc() - pass - - # this section sends the full event to a Splunk HTTP Event Collector (HEC) - if any( - e.startswith('splunkhec://') - for e in sqs_message.get('action', ()).get('to', []) - ): - from .splunk_delivery import SplunkHecDelivery - splunk_delivery = SplunkHecDelivery(self.config, self.session, self.logger) - splunk_messages = splunk_delivery.get_splunk_payloads( - sqs_message, encoded_sqs_message['Attributes']['SentTimestamp'] - ) - - try: - splunk_delivery.deliver_splunk_messages(splunk_messages) - except Exception: - traceback.print_exc() - pass + self.handle_targets( + sqs_message, + encoded_sqs_message["Attributes"]["SentTimestamp"], + email_delivery=True, + sns_delivery=True + ) diff --git a/tools/c7n_mailer/c7n_mailer/target.py b/tools/c7n_mailer/c7n_mailer/target.py new file mode 
100644 index 00000000000..f4ef749b278 --- /dev/null +++ b/tools/c7n_mailer/c7n_mailer/target.py @@ -0,0 +1,74 @@ +# Copyright The Cloud Custodian Authors. +# SPDX-License-Identifier: Apache-2.0 + +import traceback + +from .email_delivery import EmailDelivery +from .sns_delivery import SnsDelivery +from .utils import decrypt + + +class MessageTargetMixin(object): + + def handle_targets(self, message, sent_timestamp, email_delivery=True, sns_delivery=False): + # get the map of email_to_addresses to mimetext messages (with resources baked in) + # and send any emails (to SES or SMTP) if there are email addresses found + if email_delivery: + email_delivery = EmailDelivery(self.config, self.session, self.logger) + to_addrs_to_email_messages_map = email_delivery.get_to_addrs_email_messages_map(message) + for email_to_addrs, mimetext_msg in to_addrs_to_email_messages_map.items(): + email_delivery.send_c7n_email(message, list(email_to_addrs), mimetext_msg) + + # this sections gets the map of sns_to_addresses to rendered_jinja messages + # (with resources baked in) and delivers the message to each sns topic + if sns_delivery: + sns_delivery = SnsDelivery(self.config, self.session, self.logger) + sns_message_packages = sns_delivery.get_sns_message_packages(message) + sns_delivery.deliver_sns_messages(sns_message_packages, message) + + # this section sends a notification to the resource owner via Slack + if any(e.startswith('slack') or e.startswith('https://hooks.slack.com/') + for e in message.get('action', {}).get('to', []) + + message.get('action', {}).get('owner_absent_contact', [])): + from .slack_delivery import SlackDelivery + + if self.config.get('slack_token'): + self.config['slack_token'] = \ + decrypt(self.config, self.logger, self.session, 'slack_token') + + slack_delivery = SlackDelivery(self.config, self.logger, email_delivery) + slack_messages = slack_delivery.get_to_addrs_slack_messages_map(message) + try: + slack_delivery.slack_handler(message, slack_messages) + except Exception: + traceback.print_exc() + pass + + # this section gets the map of metrics to send to datadog and delivers it + if any(e.startswith('datadog') for e in message.get('action', ()).get('to')): + from .datadog_delivery import DataDogDelivery + datadog_delivery = DataDogDelivery(self.config, self.session, self.logger) + datadog_message_packages = datadog_delivery.get_datadog_message_packages(message) + + try: + datadog_delivery.deliver_datadog_messages(datadog_message_packages, message) + except Exception: + traceback.print_exc() + pass + + # this section sends the full event to a Splunk HTTP Event Collector (HEC) + if any( + e.startswith('splunkhec://') + for e in message.get('action', ()).get('to') + ): + from .splunk_delivery import SplunkHecDelivery + splunk_delivery = SplunkHecDelivery(self.config, self.session, self.logger) + splunk_messages = splunk_delivery.get_splunk_payloads( + message, sent_timestamp + ) + + try: + splunk_delivery.deliver_splunk_messages(splunk_messages) + except Exception: + traceback.print_exc() + pass diff --git a/tools/c7n_mailer/c7n_mailer/utils.py b/tools/c7n_mailer/c7n_mailer/utils.py index da81b9172be..cc29f49719a 100644 --- a/tools/c7n_mailer/c7n_mailer/utils.py +++ b/tools/c7n_mailer/c7n_mailer/utils.py @@ -22,6 +22,7 @@ class Providers: AWS = 0 Azure = 1 + GCP = 2 def get_jinja_env(template_folders): @@ -81,12 +82,11 @@ def get_rendered_jinja( # and this function would go through the resource and look for any tag keys # that match Owners or SupportTeam, and return those values 
as targets def get_resource_tag_targets(resource, target_tag_keys): - if 'Tags' not in resource: + if 'Tags' not in resource and 'labels' not in resource: return [] - if isinstance(resource['Tags'], dict): - tags = resource['Tags'] - else: - tags = {tag['Key']: tag['Value'] for tag in resource['Tags']} + tags = resource.get('Tags', []) or resource.get('labels', []) + if isinstance(tags, list): + tags = {tag['Key']: tag['Value'] for tag in tags} targets = [] for target_tag_key in target_tag_keys: if target_tag_key in tags: @@ -377,8 +377,10 @@ def resource_format(resource, resource_type): def get_provider(mailer_config): if mailer_config.get('queue_url', '').startswith('asq://'): return Providers.Azure - - return Providers.AWS + elif mailer_config.get('queue_url', '').startswith('projects'): + return Providers.GCP + else: + return Providers.AWS def kms_decrypt(config, logger, session, encrypted_field): @@ -410,6 +412,9 @@ def decrypt(config, logger, session, encrypted_field): if provider == Providers.Azure: from c7n_mailer.azure_mailer.utils import azure_decrypt return azure_decrypt(config, logger, session, encrypted_field) + elif provider == Providers.GCP: + from c7n_mailer.gcp_mailer.utils import gcp_decrypt + return gcp_decrypt(config, logger, encrypted_field) elif provider == Providers.AWS: return kms_decrypt(config, logger, session, encrypted_field) else: diff --git a/tools/c7n_mailer/c7n_mailer/utils_email.py b/tools/c7n_mailer/c7n_mailer/utils_email.py index aac60a83141..6cdb552cd20 100644 --- a/tools/c7n_mailer/c7n_mailer/utils_email.py +++ b/tools/c7n_mailer/c7n_mailer/utils_email.py @@ -1,125 +1,125 @@ -# Copyright The Cloud Custodian Authors. -# SPDX-License-Identifier: Apache-2.0 -import logging -from email.mime.text import MIMEText -from email.utils import parseaddr - -from .utils import ( - get_message_subject, get_rendered_jinja) - -logger = logging.getLogger('c7n_mailer.utils.email') - - -# Those headers are defined as follows: -# 'X-Priority': 1 (Highest), 2 (High), 3 (Normal), 4 (Low), 5 (Lowest) -# Non-standard, cf https://people.dsv.su.se/~jpalme/ietf/ietf-mail-attributes.html -# Set by Thunderbird -# 'X-MSMail-Priority': High, Normal, Low -# Cf Microsoft https://msdn.microsoft.com/en-us/library/gg671973(v=exchg.80).aspx -# Note: May increase SPAM level on Spamassassin: -# https://wiki.apache.org/spamassassin/Rules/MISSING_MIMEOLE -# 'Priority': "normal" / "non-urgent" / "urgent" -# Cf https://tools.ietf.org/html/rfc2156#section-5.3.6 -# 'Importance': "low" / "normal" / "high" -# Cf https://tools.ietf.org/html/rfc2156#section-5.3.4 -PRIORITIES = { - '1': { - 'X-Priority': '1 (Highest)', - 'X-MSMail-Priority': 'High', - 'Priority': 'urgent', - 'Importance': 'high', - }, - '2': { - 'X-Priority': '2 (High)', - 'X-MSMail-Priority': 'High', - 'Priority': 'urgent', - 'Importance': 'high', - }, - '3': { - 'X-Priority': '3 (Normal)', - 'X-MSMail-Priority': 'Normal', - 'Priority': 'normal', - 'Importance': 'normal', - }, - '4': { - 'X-Priority': '4 (Low)', - 'X-MSMail-Priority': 'Low', - 'Priority': 'non-urgent', - 'Importance': 'low', - }, - '5': { - 'X-Priority': '5 (Lowest)', - 'X-MSMail-Priority': 'Low', - 'Priority': 'non-urgent', - 'Importance': 'low', - } -} - - -def is_email(target): - if target is None: - return False - if target.startswith('slack://'): - logger.debug("Slack payload, not an email.") - return False - if parseaddr(target)[1] and '@' in target and '.' 
in target: - return True - else: - return False - - -def priority_header_is_valid(priority_header, logger): - try: - priority_header_int = int(priority_header) - except ValueError: - return False - if priority_header_int and 0 < int(priority_header_int) < 6: - return True - else: - logger.warning('mailer priority_header is not a valid string from 1 to 5') - return False - - -def set_mimetext_headers( - message, subject, from_addr, to_addrs, cc_addrs, additional_headers, - priority, logger -): - """Sets headers on Mimetext message""" - - message['Subject'] = subject - message['From'] = from_addr - message['To'] = ', '.join(to_addrs) - if cc_addrs: - message['Cc'] = ', '.join(cc_addrs) - if additional_headers: - for k, v in additional_headers.items(): - message[k] = v - - if priority and priority_header_is_valid(priority, logger): - priority = PRIORITIES[str(priority)].copy() - for key in priority: - message[key] = priority[key] - - return message - - -def get_mimetext_message(config, logger, message, resources, to_addrs): - body = get_rendered_jinja( - to_addrs, message, resources, logger, - 'template', 'default', config['templates_folders']) - - email_format = message['action'].get('template_format', None) - if not email_format: - email_format = message['action'].get( - 'template', 'default').endswith('html') and 'html' or 'plain' - - return set_mimetext_headers( - message=MIMEText(body, email_format, 'utf-8'), - subject=get_message_subject(message), - from_addr=message['action'].get('from', config['from_address']), - to_addrs=to_addrs, - cc_addrs=message['action'].get('cc', []), - additional_headers=config.get('additional_email_headers', {}), - priority=message['action'].get('priority_header', None), - logger=logger - ) +# Copyright The Cloud Custodian Authors. 
+# SPDX-License-Identifier: Apache-2.0 +import logging +from email.mime.text import MIMEText +from email.utils import parseaddr + +from .utils import ( + get_message_subject, get_rendered_jinja) + +logger = logging.getLogger('c7n_mailer.utils.email') + + +# Those headers are defined as follows: +# 'X-Priority': 1 (Highest), 2 (High), 3 (Normal), 4 (Low), 5 (Lowest) +# Non-standard, cf https://people.dsv.su.se/~jpalme/ietf/ietf-mail-attributes.html +# Set by Thunderbird +# 'X-MSMail-Priority': High, Normal, Low +# Cf Microsoft https://msdn.microsoft.com/en-us/library/gg671973(v=exchg.80).aspx +# Note: May increase SPAM level on Spamassassin: +# https://wiki.apache.org/spamassassin/Rules/MISSING_MIMEOLE +# 'Priority': "normal" / "non-urgent" / "urgent" +# Cf https://tools.ietf.org/html/rfc2156#section-5.3.6 +# 'Importance': "low" / "normal" / "high" +# Cf https://tools.ietf.org/html/rfc2156#section-5.3.4 +PRIORITIES = { + '1': { + 'X-Priority': '1 (Highest)', + 'X-MSMail-Priority': 'High', + 'Priority': 'urgent', + 'Importance': 'high', + }, + '2': { + 'X-Priority': '2 (High)', + 'X-MSMail-Priority': 'High', + 'Priority': 'urgent', + 'Importance': 'high', + }, + '3': { + 'X-Priority': '3 (Normal)', + 'X-MSMail-Priority': 'Normal', + 'Priority': 'normal', + 'Importance': 'normal', + }, + '4': { + 'X-Priority': '4 (Low)', + 'X-MSMail-Priority': 'Low', + 'Priority': 'non-urgent', + 'Importance': 'low', + }, + '5': { + 'X-Priority': '5 (Lowest)', + 'X-MSMail-Priority': 'Low', + 'Priority': 'non-urgent', + 'Importance': 'low', + } +} + + +def is_email(target): + if target is None: + return False + if target.startswith('slack://'): + logger.debug("Slack payload, not an email.") + return False + if parseaddr(target)[1] and '@' in target and '.' in target: + return True + else: + return False + + +def priority_header_is_valid(priority_header, logger): + try: + priority_header_int = int(priority_header) + except ValueError: + return False + if priority_header_int and 0 < int(priority_header_int) < 6: + return True + else: + logger.warning('mailer priority_header is not a valid string from 1 to 5') + return False + + +def set_mimetext_headers( + message, subject, from_addr, to_addrs, cc_addrs, additional_headers, + priority, logger +): + """Sets headers on Mimetext message""" + + message['Subject'] = subject + message['From'] = from_addr + message['To'] = ', '.join(to_addrs) + if cc_addrs: + message['Cc'] = ', '.join(cc_addrs) + if additional_headers: + for k, v in additional_headers.items(): + message[k] = v + + if priority and priority_header_is_valid(priority, logger): + priority = PRIORITIES[str(priority)].copy() + for key in priority: + message[key] = priority[key] + + return message + + +def get_mimetext_message(config, logger, message, resources, to_addrs): + body = get_rendered_jinja( + to_addrs, message, resources, logger, + 'template', 'default', config['templates_folders']) + + email_format = message['action'].get('template_format', None) + if not email_format: + email_format = message['action'].get( + 'template', 'default').endswith('html') and 'html' or 'plain' + + return set_mimetext_headers( + message=MIMEText(body, email_format, 'utf-8'), + subject=get_message_subject(message), + from_addr=message['action'].get('from', config['from_address']), + to_addrs=to_addrs, + cc_addrs=message['action'].get('cc', []), + additional_headers=config.get('additional_email_headers', {}), + priority=message['action'].get('priority_header', None), + logger=logger + ) diff --git 
a/tools/c7n_mailer/poetry.lock b/tools/c7n_mailer/poetry.lock index b7f1a91f058..ddddae7821c 100644 --- a/tools/c7n_mailer/poetry.lock +++ b/tools/c7n_mailer/poetry.lock @@ -20,16 +20,39 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] +[[package]] +name = "black" +version = "22.6.0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + [[package]] name = "boto3" -version = "1.24.44" +version = "1.24.55" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.27.44,<1.28.0" +botocore = ">=1.27.55,<1.28.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -38,7 +61,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.27.44" +version = "1.27.55" description = "Low-level, data-driven core of boto 3." 
category = "main" optional = false @@ -50,7 +73,15 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.13.8)"] +crt = ["awscrt (==0.14.0)"] + +[[package]] +name = "cachetools" +version = "5.2.0" +description = "Extensible memoizing collections and decorators" +category = "main" +optional = false +python-versions = "~=3.7" [[package]] name = "certifi" @@ -71,6 +102,18 @@ python-versions = ">=3.6.0" [package.extras] unicode_backport = ["unicodedata2"] +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + [[package]] name = "colorama" version = "0.4.5" @@ -113,8 +156,118 @@ six = ">=1.16.0,<2.0.0" sortedcontainers = ">=2.4.0,<3.0.0" [package.extras] -aioredis = ["aioredis (>=2.0.1,<3.0.0)"] lua = ["lupa (>=1.13,<2.0)"] +aioredis = ["aioredis (>=2.0.1,<3.0.0)"] + +[[package]] +name = "google-api-core" +version = "2.8.2" +description = "Google API client core library" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +google-auth = ">=1.25.0,<3.0dev" +googleapis-common-protos = ">=1.56.2,<2.0dev" +grpcio = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} +grpcio-status = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.15.0,<5.0.0dev" +requests = ">=2.18.0,<3.0.0dev" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)"] + +[[package]] +name = "google-auth" +version = "2.10.0" +description = "Google Authentication Library" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = {version = ">=3.1.4,<5", markers = "python_version >= \"3.6\""} +six = ">=1.9.0" + +[package.extras] +reauth = ["pyu2f (>=0.1.5)"] +pyopenssl = ["pyopenssl (>=20.0.0)"] +enterprise_cert = ["pyopenssl (==22.0.0)", "cryptography (==36.0.2)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"] + +[[package]] +name = "google-cloud-secret-manager" +version = "2.12.3" +description = "Secret Manager API API client library" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +google-api-core = {version = ">=1.32.0,<2.0.0 || >=2.8.0,<3.0.0dev", extras = ["grpc"]} +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +proto-plus = ">=1.22.0,<2.0.0dev" +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +libcst = ["libcst (>=0.2.5)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.56.4" +description = "Common protobufs used in Google APIs" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +grpcio = {version = ">=1.0.0,<2.0.0dev", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.15.0,<5.0.0dev" + +[package.extras] +grpc = ["grpcio (>=1.0.0,<2.0.0dev)"] + +[[package]] +name = "grpc-google-iam-v1" +version = "0.12.4" +description = "IAM API client library" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} +grpcio = ">=1.0.0,<2.0.0dev" + +[[package]] +name = 
"grpcio" +version = "1.48.0" +description = "HTTP/2-based RPC framework" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +six = ">=1.5.2" + +[package.extras] +protobuf = ["grpcio-tools (>=1.48.0)"] + +[[package]] +name = "grpcio-status" +version = "1.48.0" +description = "Status proto mapping for gRPC" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.48.0" +protobuf = ">=3.12.0" [[package]] name = "idna" @@ -153,8 +306,8 @@ python-versions = ">=3.7" zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +testing = ["pytest-mypy (>=0.9.1)", "pytest-black (>=0.3.7)", "pytest-enabler (>=1.3)", "pytest-cov", "pytest-flake8", "pytest-checkdocs (>=2.4)", "pytest (>=6)"] +docs = ["jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "jaraco.packaging (>=9)", "sphinx"] [[package]] name = "iniconfig" @@ -207,7 +360,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "jsonschema" -version = "4.9.0" +version = "4.13.0" description = "An implementation of JSON Schema validation for Python" category = "main" optional = false @@ -222,8 +375,8 @@ pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["webcolors (>=1.11)", "uri-template", "rfc3986-validator (>0.1.0)", "rfc3339-validator", "jsonpointer (>1.13)", "isoduration", "idna", "fqdn"] +format = ["webcolors (>=1.11)", "uri-template", "rfc3987", "rfc3339-validator", "jsonpointer (>1.13)", "isoduration", "idna", "fqdn"] [[package]] name = "ldap3" @@ -244,6 +397,32 @@ category = "main" optional = false python-versions = ">=3.7" +[[package]] +name = "mypy" +version = "0.931" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = ">=1.1.0" +typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "packaging" version = "21.3" @@ -255,6 +434,14 @@ python-versions = ">=3.6" [package.dependencies] pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +[[package]] +name = "pathspec" +version = "0.9.0" +description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + [[package]] name = "pkgutil-resolve-name" version = "1.3.10" @@ -263,6 +450,18 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "platformdirs" +version = "2.5.2" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] + [[package]] name = "pluggy" version = "1.0.0" @@ -278,6 +477,28 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "proto-plus" +version = "1.22.0" +description = "Beautiful, Pythonic protocol buffers." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "4.21.5" +description = "" +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "py" version = "1.11.0" @@ -294,6 +515,17 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "pyasn1-modules" +version = "0.2.8" +description = "A collection of ASN.1-based protocols modules." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.5.0" + [[package]] name = "pyparsing" version = "3.0.9" @@ -391,6 +623,17 @@ urllib3 = ">=1.21.1,<1.27" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +category = "main" +optional = false +python-versions = ">=3.6,<4" + +[package.dependencies] +pyasn1 = ">=0.1.3" + [[package]] name = "s3transfer" version = "0.6.0" @@ -449,6 +692,30 @@ category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "typed-ast" +version = "1.5.4" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "types-six" +version = "1.16.18" +description = "Typing stubs for six" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "typing-extensions" version = "4.3.0" @@ -466,9 +733,9 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +secure = ["ipaddress", "certifi", "idna (>=2.0.0)", "cryptography (>=1.3.4)", "pyOpenSSL (>=0.14)"] +brotli = ["brotlipy (>=0.6.0)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] [[package]] name = "zipp" @@ -479,24 +746,50 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift 
(>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +testing = ["pytest-mypy (>=0.9.1)", "pytest-black (>=0.3.7)", "func-timeout", "jaraco.itertools", "pytest-enabler (>=1.3)", "pytest-cov", "pytest-flake8", "pytest-checkdocs (>=2.4)", "pytest (>=6)"] +docs = ["jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "jaraco.packaging (>=9)", "sphinx"] [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "0af1804423b1f01cc98ba5a7e9efc33b991e33a4687c7dffe601943abf01f6cd" +content-hash = "4f8e0cb865863b90cc7ec8d1cff7eeacb791aff1612f6dd266d495c1d8f5308c" [metadata.files] -atomicwrites = [] -attrs = [] -boto3 = [] -botocore = [] +atomicwrites = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] +attrs = [ + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, +] +black = [ + {file = "black-22.6.0-py3-none-any.whl", hash = "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c"}, + {file = "black-22.6.0.tar.gz", hash = "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9"}, +] +boto3 = [ + {file = "boto3-1.24.55-py3-none-any.whl", hash = "sha256:4b620f55f3015c516a8f8063b02060a7bb9a763e10de3c0f3ec90102cdfa28db"}, + {file = "boto3-1.24.55.tar.gz", hash = "sha256:9fe6c7c5019671cbea82f02dbaae7e743ec86187443ab5f333ebb3d3bef63dce"}, +] +botocore = [ + {file = "botocore-1.27.55-py3-none-any.whl", hash = "sha256:0b4a17e81c17845245c0e7a3fbf83753c7f6a5544b93dcf6e0fcc0f3f2156ab2"}, + {file = "botocore-1.27.55.tar.gz", hash = "sha256:929d6be4bdb33a693e6c8e06383dba76fa628bb72fdb1f9353fd13f5d115dd19"}, +] +cachetools = [ + {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, + {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, +] certifi = [ {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, ] -charset-normalizer = [] +charset-normalizer = [ + {file = "charset-normalizer-2.1.0.tar.gz", hash = "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413"}, + {file = "charset_normalizer-2.1.0-py3-none-any.whl", hash = "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5"}, +] +click = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] colorama = [ {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, @@ -509,7 +802,82 @@ decorator = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = 
"sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] -fakeredis = [] +fakeredis = [ + {file = "fakeredis-1.9.0-py3-none-any.whl", hash = "sha256:868467ff399520fc77e37ff002c60d1b2a1674742982e27338adaeebcc537648"}, + {file = "fakeredis-1.9.0.tar.gz", hash = "sha256:60639946e3bb1274c30416f539f01f9d73b4ea68c244c1442f5524e45f51e882"}, +] +google-api-core = [ + {file = "google-api-core-2.8.2.tar.gz", hash = "sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc"}, + {file = "google_api_core-2.8.2-py3-none-any.whl", hash = "sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50"}, +] +google-auth = [ + {file = "google-auth-2.10.0.tar.gz", hash = "sha256:7904dbd44b745c7323fef29565adee2fe7ff48473e2d94443aced40b0404a395"}, + {file = "google_auth-2.10.0-py2.py3-none-any.whl", hash = "sha256:1deba4a54f95ef67b4139eaf5c20eaa7047215eec9f6a2344599b8596db8863b"}, +] +google-cloud-secret-manager = [ + {file = "google-cloud-secret-manager-2.12.3.tar.gz", hash = "sha256:e7dd59fc920cc08c23863e15289294cedbb614cba977252e2a6c7151f2bf4cb1"}, + {file = "google_cloud_secret_manager-2.12.3-py2.py3-none-any.whl", hash = "sha256:85e401250cb88a95eb001168af8d31ed3b331df95083954a59c778609634e2bc"}, +] +googleapis-common-protos = [ + {file = "googleapis-common-protos-1.56.4.tar.gz", hash = "sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417"}, + {file = "googleapis_common_protos-1.56.4-py2.py3-none-any.whl", hash = "sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394"}, +] +grpc-google-iam-v1 = [ + {file = "grpc-google-iam-v1-0.12.4.tar.gz", hash = "sha256:3f0ac2c940b9a855d7ce7e31fde28bddb0d9ac362d32d07c67148306931a0e30"}, + {file = "grpc_google_iam_v1-0.12.4-py2.py3-none-any.whl", hash = "sha256:312801ae848aeb8408c099ea372b96d253077e7851aae1a9e745df984f81f20c"}, +] +grpcio = [ + {file = "grpcio-1.48.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:4a049a032144641ed5d073535c0dc69eb6029187cc729a66946c86dcc8eec3a1"}, + {file = "grpcio-1.48.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:f8bc76f5cd95f5476e5285fe5d3704a9332586a569fbbccef551b0b6f7a270f9"}, + {file = "grpcio-1.48.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:448d397fe88e9fef8170f019b86abdc4d554ae311aaf4dbff1532fde227d3308"}, + {file = "grpcio-1.48.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f9b6b6f7c83869d2316c5d13f953381881a16741275a34ec5ed5762f11b206e"}, + {file = "grpcio-1.48.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bd8541c4b6b43c9024496d30b4a12346325d3a17a1f3c80ad8924caed1e35c3"}, + {file = "grpcio-1.48.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:877d33aeba05ae0b9e81761a694914ed33613f655c35f6bbcf4ebbcb984e0167"}, + {file = "grpcio-1.48.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cd01a8201fd8ab2ce496f7e65975da1f1e629eac8eea84ead0fd77e32e4350cd"}, + {file = "grpcio-1.48.0-cp310-cp310-win32.whl", hash = "sha256:0388da923dff58ba7f711233e41c2b749b5817b8e0f137a107672d9c15a1009c"}, + {file = "grpcio-1.48.0-cp310-cp310-win_amd64.whl", hash = "sha256:8dcffdb8921fd88857ae350fd579277a5f9315351e89ed9094ef28927a46d40d"}, + {file = "grpcio-1.48.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:2138c50331232f56178c2b36dcfa6ad67aad705fe410955f3b2a53d722191b89"}, + {file = "grpcio-1.48.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:af2d80f142da2a6af45204a5ca2374e2747af07a99de54a1164111e169a761ff"}, + {file = "grpcio-1.48.0-cp36-cp36m-manylinux_2_17_aarch64.whl", 
hash = "sha256:59284bd4cdf47c147c26d91aca693765318d524328f6ece2a1a0b85a12a362af"}, + {file = "grpcio-1.48.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc3ebfe356c0c6750379cd194bf2b7e5d1d2f29db1832358f05a73e9290db98c"}, + {file = "grpcio-1.48.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc2619a31339e1c53731f54761f1a2cb865d3421f690e00ef3e92f90d2a0c5ae"}, + {file = "grpcio-1.48.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7df637405de328a54c1c8c08a3206f974c7a577730f90644af4c3400b7bfde2d"}, + {file = "grpcio-1.48.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:9e73b95969a579798bfbeb85d376695cce5172357fb52e450467ceb8e7365152"}, + {file = "grpcio-1.48.0-cp36-cp36m-win32.whl", hash = "sha256:059e9d58b5aba7fb9eabe3a4d2ac49e1dcbc2b54b0f166f6475e40b7f4435343"}, + {file = "grpcio-1.48.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7cebcf645170f0c82ef71769544f9ac4515993a4d367f5900aba2eb4ecd2a32f"}, + {file = "grpcio-1.48.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:8af3a8845df35b838104d6fb1ae7f4969d248cf037fa2794916d31e917346f72"}, + {file = "grpcio-1.48.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:a1ef40975ec9ced6c17ce7fbec9825823da782fa606f0b92392646ff3886f198"}, + {file = "grpcio-1.48.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:7cccbf6db31f2a78e1909047ff69620f94a4e6e53251858e9502fbbff5714b48"}, + {file = "grpcio-1.48.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f3f142579f58def64c0850f0bb0eb1b425ae885f5669dda5b73ade64ad2b753"}, + {file = "grpcio-1.48.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:656c6f6f7b815bca3054780b8cdfa1e4e37cd36c887a48558d00c2cf85f31697"}, + {file = "grpcio-1.48.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:cba4538e8a2ef123ea570e7b1d62162e158963c2471e35d79eb9690c971a10c0"}, + {file = "grpcio-1.48.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9daa67820fafceec6194ed1686c1783816e62d6756ff301ba93e682948836846"}, + {file = "grpcio-1.48.0-cp37-cp37m-win32.whl", hash = "sha256:7ec264a7fb413e0c804a7a48a6f7d7212742955a60724c44d793da35a8f30873"}, + {file = "grpcio-1.48.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a2b1b33b92359388b8164807313dcbb3317101b038a5d54342982560329d958f"}, + {file = "grpcio-1.48.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:7b820696a5ce7b98f459f234698cb323f89b355373789188efa126d7f47a2a92"}, + {file = "grpcio-1.48.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:e4dfae66ebc165c46c5b7048eb554472ee72fbaab2c2c2da7f9b1621c81e077c"}, + {file = "grpcio-1.48.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:f7115038edce33b494e0138b0bd31a2eb6595d45e2eed23be46bc32886feb741"}, + {file = "grpcio-1.48.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4e996282238943ca114628255be61980e38b25f73a08ae2ffd02b63eaf70d3a"}, + {file = "grpcio-1.48.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13dad31f5155fa555d393511cc8108c41b1b5b54dc4c24c27d4694ddd7a78fad"}, + {file = "grpcio-1.48.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c84b9d90b2641963de98b35bb7a2a51f78119fe5bd00ef27246ba9f4f0835e36"}, + {file = "grpcio-1.48.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41b65166779d7dafac4c98380ac19f690f1c5fe18083a71d370df87b24dd30ff"}, + {file = "grpcio-1.48.0-cp38-cp38-win32.whl", hash = "sha256:b890e5f5fbc21cb994894f73ecb2faaa66697d8debcb228a5adb0622b9bec3b2"}, + {file = "grpcio-1.48.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:5fe3af539d2f50891ed93aed3064ffbcc38bf848aa3f7ed1fbedcce139c57302"}, + {file = "grpcio-1.48.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:a4ed57f4e3d91259551e6765782b22d9e8b8178fec43ebf8e1b2c392c4ced37b"}, + {file = "grpcio-1.48.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:60843d8184e171886dd7a93d6672e2ef0b08dfd4f88da7421c10b46b6e031ac4"}, + {file = "grpcio-1.48.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:0ecba22f25ccde2442be7e7dd7fa746905d628f03312b4a0c9961f0d99771f53"}, + {file = "grpcio-1.48.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34f5917f0c49a04633dc12d483c8aee6f6d9f69133b700214d3703f72a72f501"}, + {file = "grpcio-1.48.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4c4ad8ad7e2cf3a272cbc96734d56635e6543939022f17e0c4487f7d2a45bf9"}, + {file = "grpcio-1.48.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:111fb2f5f4a069f331ae23106145fd16dd4e1112ca223858a922068614dac6d2"}, + {file = "grpcio-1.48.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:beb0573daa49889efcfea0a6e995b4f39d481aa1b94e1257617406ef417b56a6"}, + {file = "grpcio-1.48.0-cp39-cp39-win32.whl", hash = "sha256:ce70254a082cb767217b2fdee374cc79199d338d46140753438cd6d67c609b2f"}, + {file = "grpcio-1.48.0-cp39-cp39-win_amd64.whl", hash = "sha256:ae3fd135666448058fe277d93c10e0f18345fbcbb015c4642de2fa3db6f0c205"}, + {file = "grpcio-1.48.0.tar.gz", hash = "sha256:eaf4bb73819863440727195411ab3b5c304f6663625e66f348e91ebe0a039306"}, +] +grpcio-status = [ + {file = "grpcio-status-1.48.0.tar.gz", hash = "sha256:afac961fc3713889d3c48c11461aba49842ca62a54dfe8f346442046036e9856"}, + {file = "grpcio_status-1.48.0-py3-none-any.whl", hash = "sha256:34808aa954e829c4600de3324ec53b5fa2c934f58c3d55586959b9d6989f0d5b"}, +] idna = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, @@ -518,7 +886,10 @@ importlib-metadata = [ {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"}, {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"}, ] -importlib-resources = [] +importlib-resources = [ + {file = "importlib_resources-5.9.0-py3-none-any.whl", hash = "sha256:f78a8df21a79bcc30cfd400bdc38f314333de7c0fb619763f6b9dabab8268bb7"}, + {file = "importlib_resources-5.9.0.tar.gz", hash = "sha256:5481e97fb45af8dcf2f798952625591c58fe599d0735d86b10f54de086a61681"}, +] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -527,7 +898,10 @@ jinja2 = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] -jmespath = [] +jmespath = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] jsonpatch = [ {file = "jsonpatch-1.32-py2.py3-none-any.whl", hash = 
"sha256:26ac385719ac9f54df8a2f0827bb8253aa3ea8ab7b3368457bcdb8c14595a397"}, {file = "jsonpatch-1.32.tar.gz", hash = "sha256:b6ddfe6c3db30d81a96aaeceb6baf916094ffa23d7dd5fa2c13e13f8b6e600c2"}, @@ -536,7 +910,10 @@ jsonpointer = [ {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = "sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, ] -jsonschema = [] +jsonschema = [ + {file = "jsonschema-4.13.0-py3-none-any.whl", hash = "sha256:870a61bb45050b81103faf6a4be00a0a906e06636ffcf0b84f5a2e51faf901ff"}, + {file = "jsonschema-4.13.0.tar.gz", hash = "sha256:3776512df4f53f74e6e28fe35717b5b223c1756875486984a31bc9165e7fc920"}, +] ldap3 = [ {file = "ldap3-2.9.1-py2.6.egg", hash = "sha256:5ab7febc00689181375de40c396dcad4f2659cd260fc5e94c508b6d77c17e9d5"}, {file = "ldap3-2.9.1-py2.7.egg", hash = "sha256:2bc966556fc4d4fa9f445a1c31dc484ee81d44a51ab0e2d0fd05b62cac75daa6"}, @@ -586,15 +963,72 @@ markupsafe = [ {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, ] +mypy = [ + {file = "mypy-0.931-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c5b42d0815e15518b1f0990cff7a705805961613e701db60387e6fb663fe78a"}, + {file = "mypy-0.931-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c89702cac5b302f0c5d33b172d2b55b5df2bede3344a2fbed99ff96bddb2cf00"}, + {file = "mypy-0.931-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:300717a07ad09525401a508ef5d105e6b56646f7942eb92715a1c8d610149714"}, + {file = "mypy-0.931-cp310-cp310-win_amd64.whl", hash = "sha256:7b3f6f557ba4afc7f2ce6d3215d5db279bcf120b3cfd0add20a5d4f4abdae5bc"}, + {file = "mypy-0.931-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1bf752559797c897cdd2c65f7b60c2b6969ffe458417b8d947b8340cc9cec08d"}, + {file = "mypy-0.931-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4365c60266b95a3f216a3047f1d8e3f895da6c7402e9e1ddfab96393122cc58d"}, + {file = "mypy-0.931-cp36-cp36m-win_amd64.whl", hash = "sha256:1b65714dc296a7991000b6ee59a35b3f550e0073411ac9d3202f6516621ba66c"}, + {file = "mypy-0.931-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e839191b8da5b4e5d805f940537efcaa13ea5dd98418f06dc585d2891d228cf0"}, + {file = "mypy-0.931-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:50c7346a46dc76a4ed88f3277d4959de8a2bd0a0fa47fa87a4cde36fe247ac05"}, + {file = "mypy-0.931-cp37-cp37m-win_amd64.whl", hash = "sha256:d8f1ff62f7a879c9fe5917b3f9eb93a79b78aad47b533911b853a757223f72e7"}, + {file = "mypy-0.931-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9fe20d0872b26c4bba1c1be02c5340de1019530302cf2dcc85c7f9fc3252ae0"}, + {file = "mypy-0.931-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1b06268df7eb53a8feea99cbfff77a6e2b205e70bf31743e786678ef87ee8069"}, + {file = "mypy-0.931-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8c11003aaeaf7cc2d0f1bc101c1cc9454ec4cc9cb825aef3cafff8a5fdf4c799"}, + {file = "mypy-0.931-cp38-cp38-win_amd64.whl", hash = "sha256:d9d2b84b2007cea426e327d2483238f040c49405a6bf4074f605f0156c91a47a"}, + {file = 
"mypy-0.931-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ff3bf387c14c805ab1388185dd22d6b210824e164d4bb324b195ff34e322d166"}, + {file = "mypy-0.931-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5b56154f8c09427bae082b32275a21f500b24d93c88d69a5e82f3978018a0266"}, + {file = "mypy-0.931-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ca7f8c4b1584d63c9a0f827c37ba7a47226c19a23a753d52e5b5eddb201afcd"}, + {file = "mypy-0.931-cp39-cp39-win_amd64.whl", hash = "sha256:74f7eccbfd436abe9c352ad9fb65872cc0f1f0a868e9d9c44db0893440f0c697"}, + {file = "mypy-0.931-py3-none-any.whl", hash = "sha256:1171f2e0859cfff2d366da2c7092b06130f232c636a3f7301e3feb8b41f6377d"}, + {file = "mypy-0.931.tar.gz", hash = "sha256:0038b21890867793581e4cb0d810829f5fd4441aa75796b53033af3aa30430ce"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] -pkgutil-resolve-name = [] +pathspec = [ + {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, + {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, +] +pkgutil-resolve-name = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] +platformdirs = [ + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, +] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] +proto-plus = [ + {file = "proto-plus-1.22.0.tar.gz", hash = "sha256:c2e6693fdf68c405a6428226915a8625d21d0513793598ae3287a1210478d8ec"}, + {file = "proto_plus-1.22.0-py3-none-any.whl", hash = "sha256:a27192d8cdc54e044f137b4c9053c9108cf5c065b46d067f1bcd389a911faf5b"}, +] +protobuf = [ + {file = "protobuf-4.21.5-cp310-abi3-win32.whl", hash = "sha256:5310cbe761e87f0c1decce019d23f2101521d4dfff46034f8a12a53546036ec7"}, + {file = "protobuf-4.21.5-cp310-abi3-win_amd64.whl", hash = "sha256:e5c5a2886ae48d22a9d32fbb9b6636a089af3cd26b706750258ce1ca96cc0116"}, + {file = "protobuf-4.21.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ee04f5823ed98bb9a8c3b1dc503c49515e0172650875c3f76e225b223793a1f2"}, + {file = "protobuf-4.21.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:b04484d6f42f48c57dd2737a72692f4c6987529cdd148fb5b8e5f616862a2e37"}, + {file = "protobuf-4.21.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e0b272217aad8971763960238c1a1e6a65d50ef7824e23300da97569a251c55"}, + {file = 
"protobuf-4.21.5-cp37-cp37m-win32.whl", hash = "sha256:5eb0724615e90075f1d763983e708e1cef08e66b1891d8b8b6c33bc3b2f1a02b"}, + {file = "protobuf-4.21.5-cp37-cp37m-win_amd64.whl", hash = "sha256:011c0f267e85f5d73750b6c25f0155d5db1e9443cd3590ab669a6221dd8fcdb0"}, + {file = "protobuf-4.21.5-cp38-cp38-win32.whl", hash = "sha256:7b6f22463e2d1053d03058b7b4ceca6e4ed4c14f8c286c32824df751137bf8e7"}, + {file = "protobuf-4.21.5-cp38-cp38-win_amd64.whl", hash = "sha256:b52e7a522911a40445a5f588bd5b5e584291bfc5545e09b7060685e4b2ff814f"}, + {file = "protobuf-4.21.5-cp39-cp39-win32.whl", hash = "sha256:a7faa62b183d6a928e3daffd06af843b4287d16ef6e40f331575ecd236a7974d"}, + {file = "protobuf-4.21.5-cp39-cp39-win_amd64.whl", hash = "sha256:5e0ce02418ef03d7657a420ae8fd6fec4995ac713a3cb09164e95f694dbcf085"}, + {file = "protobuf-4.21.5-py2.py3-none-any.whl", hash = "sha256:bf711b451212dc5b0fa45ae7dada07d8e71a4b0ff0bc8e4783ee145f47ac4f82"}, + {file = "protobuf-4.21.5-py3-none-any.whl", hash = "sha256:3ec6f5b37935406bb9df9b277e79f8ed81d697146e07ef2ba8a5a272fb24b2c9"}, + {file = "protobuf-4.21.5.tar.gz", hash = "sha256:eb1106e87e095628e96884a877a51cdb90087106ee693925ec0a300468a9be3a"}, +] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, @@ -614,6 +1048,21 @@ pyasn1 = [ {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, ] +pyasn1-modules = [ + {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"}, + {file = "pyasn1_modules-0.2.8-py2.4.egg", hash = "sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199"}, + {file = "pyasn1_modules-0.2.8-py2.5.egg", hash = "sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405"}, + {file = "pyasn1_modules-0.2.8-py2.6.egg", hash = "sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb"}, + {file = "pyasn1_modules-0.2.8-py2.7.egg", hash = "sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8"}, + {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"}, + {file = "pyasn1_modules-0.2.8-py3.1.egg", hash = "sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d"}, + {file = "pyasn1_modules-0.2.8-py3.2.egg", hash = "sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45"}, + {file = "pyasn1_modules-0.2.8-py3.3.egg", hash = "sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4"}, + {file = "pyasn1_modules-0.2.8-py3.4.egg", hash = "sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811"}, + {file = "pyasn1_modules-0.2.8-py3.5.egg", hash = "sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed"}, + {file = "pyasn1_modules-0.2.8-py3.6.egg", hash = "sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0"}, + {file = "pyasn1_modules-0.2.8-py3.7.egg", hash = "sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd"}, +] pyparsing = [ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, {file = "pyparsing-3.0.9.tar.gz", hash = 
"sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, @@ -692,7 +1141,14 @@ redis = [ {file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"}, {file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"}, ] -requests = [] +requests = [ + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, +] +rsa = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] s3transfer = [ {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, @@ -716,6 +1172,49 @@ toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -typing-extensions = [] -urllib3 = [] -zipp = [] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +typed-ast = [ + {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, + {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, + {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, + {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, + {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, + {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, + {file = 
"typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, + {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, + {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, + {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, + {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, +] +types-six = [ + {file = "types-six-1.16.18.tar.gz", hash = "sha256:26f481fabb65321ba428bdfb82c97fc638e00be6b20efa83915b007cf3893e28"}, + {file = "types_six-1.16.18-py3-none-any.whl", hash = "sha256:4f8be25faaa0a3088d6380ab7f27b4c7bc74ba81b2c7a351729f5be4af438290"}, +] +typing-extensions = [ + {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, + {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, +] +urllib3 = [ + {file = "urllib3-1.26.11-py2.py3-none-any.whl", hash = "sha256:c33ccba33c819596124764c23a97d25f32b28433ba0dedeb77d873a38722c9bc"}, + {file = "urllib3-1.26.11.tar.gz", hash = "sha256:ea6e8fb210b19d950fab93b60c9009226c63a28808bc8386e05301e25883ac0a"}, +] +zipp = [ + {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, + {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"}, +] diff --git a/tools/c7n_mailer/pyproject.toml b/tools/c7n_mailer/pyproject.toml index 9060ac8b775..99e76cf7491 100644 --- a/tools/c7n_mailer/pyproject.toml +++ b/tools/c7n_mailer/pyproject.toml @@ -31,11 +31,15 @@ ldap3 = "^2.6.1" redis = "^3.4.1" jsonpointer = "^2.0" jsonpatch = "^1.25" +types-six = "^1.16.10" +google-cloud-secret-manager = "^2.8.0" importlib-metadata = ">=4.11.1" 
[tool.poetry.dev-dependencies] fakeredis = "^1.2.0" pytest = "^6.0.0" +mypy = "^0.931" +black = "^22.1.0" [build-system] requires = ["poetry>=0.12", "setuptools"] diff --git a/tools/c7n_mailer/setup.py b/tools/c7n_mailer/setup.py index 0ef7fc0cd66..c723a103505 100644 --- a/tools/c7n_mailer/setup.py +++ b/tools/c7n_mailer/setup.py @@ -4,7 +4,7 @@ from setuptools import setup packages = \ -['c7n_mailer', 'c7n_mailer.azure_mailer'] +['c7n_mailer', 'c7n_mailer.azure_mailer', 'c7n_mailer.gcp_mailer'] package_data = \ {'': ['*'], 'c7n_mailer': ['msg-templates/*']} @@ -29,7 +29,7 @@ setup_kwargs = { 'name': 'c7n-mailer', - 'version': '0.6.17', + 'version': '0.7.0', 'description': 'Cloud Custodian - Reference Mailer', 'license': 'Apache-2.0', 'classifiers': [ diff --git a/tools/c7n_mailer/tests/common.py b/tools/c7n_mailer/tests/common.py index 1c2519d67d0..6f66865cb9d 100644 --- a/tools/c7n_mailer/tests/common.py +++ b/tools/c7n_mailer/tests/common.py @@ -9,288 +9,311 @@ from c7n_mailer.ldap_lookup import LdapLookup, Redis -logger = logging.getLogger('custodian.mailer') +logger = logging.getLogger("custodian.mailer") PETER = ( - 'uid=peter,cn=users,dc=initech,dc=com', + "uid=peter,cn=users,dc=initech,dc=com", { - 'uid': ['peter'], - 'manager': 'uid=bill_lumbergh,cn=users,dc=initech,dc=com', - 'mail': 'peter@initech.com', - 'displayName': 'Peter', - 'objectClass': 'person' - } + "uid": ["peter"], + "manager": "uid=bill_lumbergh,cn=users,dc=initech,dc=com", + "mail": "peter@initech.com", + "displayName": "Peter", + "objectClass": "person", + }, ) BILL = ( - 'uid=bill_lumbergh,cn=users,dc=initech,dc=com', + "uid=bill_lumbergh,cn=users,dc=initech,dc=com", { - 'uid': ['bill_lumbergh'], - 'mail': 'bill_lumberg@initech.com', - 'displayName': 'Bill Lumberg', - 'objectClass': 'person' - } + "uid": ["bill_lumbergh"], + "mail": "bill_lumberg@initech.com", + "displayName": "Bill Lumberg", + "objectClass": "person", + }, ) MAILER_CONFIG = { - 'smtp_port': 25, - 'from_address': 'devops@initech.com', - 'contact_tags': ['OwnerEmail', 'SupportEmail'], - 'queue_url': 'https://sqs.us-east-1.amazonaws.com/xxxx/cloudcustodian-mailer', - 'region': 'us-east-1', - 'ses_region': 'us-east-1', - 'ldap_uri': 'ldap.initech.com', - 'smtp_server': 'smtp.inittech.com', - 'cache_engine': 'sqlite', - 'role': 'arn:aws:iam::xxxx:role/cloudcustodian-mailer', - 'ldap_uid_tags': ['CreatorName', 'Owner'], - 'templates_folders': [os.path.abspath(os.path.dirname(__file__)), - os.path.abspath('/'), ''], + "smtp_port": 25, + "from_address": "devops@initech.com", + "contact_tags": ["OwnerEmail", "SupportEmail"], + "queue_url": "https://sqs.us-east-1.amazonaws.com/xxxx/cloudcustodian-mailer", + "region": "us-east-1", + "ses_region": "us-east-1", + "ldap_uri": "ldap.initech.com", + "smtp_server": "smtp.inittech.com", + "cache_engine": "sqlite", + "role": "arn:aws:iam::xxxx:role/cloudcustodian-mailer", + "ldap_uid_tags": ["CreatorName", "Owner"], + "templates_folders": [ + os.path.abspath(os.path.dirname(__file__)), + os.path.abspath("/"), + "" + ], } - MAILER_CONFIG_AZURE = { - 'queue_url': 'asq://storageaccount.queue.core.windows.net/queuename', - 'from_address': 'you@youremail.com', - 'sendgrid_api_key': 'SENDGRID_API_KEY', - 'templates_folders': [ + "queue_url": "asq://storageaccount.queue.core.windows.net/queuename", + "from_address": "you@youremail.com", + "sendgrid_api_key": "SENDGRID_API_KEY", + "templates_folders": [ + os.path.abspath(os.path.dirname(__file__)), + os.path.abspath("/"), + 
os.path.abspath(os.path.join(os.path.dirname(__file__), "test-templates")), + "" + ], +} + +MAILER_CONFIG_GCP = { + "smtp_port": 25, + "smtp_ssl": False, + "smtp_username": "user", + "smtp_password": "password", + "from_address": "devops@initech.com", + "queue_url": "projects/c7n-dev/subscriptions/getnotify", + "smtp_server": "smtp.inittech.com", + "templates_folders": [ os.path.abspath(os.path.dirname(__file__)), - os.path.abspath('/'), - os.path.abspath(os.path.join(os.path.dirname(__file__), 'test-templates')), + os.path.abspath("/"), + "" ], } RESOURCE_1 = { - 'AvailabilityZone': 'us-east-1a', - 'Attachments': [], - 'Tags': [ - { - 'Value': 'milton@initech.com', - 'Key': 'SupportEmail' - }, - { - 'Value': 'peter', - 'Key': 'CreatorName' - } + "AvailabilityZone": "us-east-1a", + "Attachments": [], + "Tags": [ + {"Value": "milton@initech.com", "Key": "SupportEmail"}, + {"Value": "peter", "Key": "CreatorName"}, ], - 'VolumeId': 'vol-01a0e6ea6b89f0099' + "VolumeId": "vol-01a0e6ea6b89f0099", } RESOURCE_2 = { - 'AvailabilityZone': 'us-east-1c', - 'Attachments': [], - 'Tags': [ - { - 'Value': 'milton@initech.com', - 'Key': 'SupportEmail' - }, - { - 'Value': 'peter', - 'Key': 'CreatorName' - } + "AvailabilityZone": "us-east-1c", + "Attachments": [], + "Tags": [ + {"Value": "milton@initech.com", "Key": "SupportEmail"}, + {"Value": "peter", "Key": "CreatorName"}, ], - 'VolumeId': 'vol-21a0e7ea9b19f0043', - 'Size': 8 + "VolumeId": "vol-21a0e7ea9b19f0043", + "Size": 8, } RESOURCE_3 = { - 'AvailabilityZone': 'us-east-1c', + "AvailabilityZone": "us-east-1c", "CreateTime": "2019-05-07T19:09:46.148Z", - 'Attachments': [ + "Attachments": [ { "AttachTime": "2019-05-07T19:09:46.000Z", "Device": "/dev/xvda", "InstanceId": "i-00000000000000000", "State": "attached", "VolumeId": "vol-00000000000000000", - "DeleteOnTermination": 'true' + "DeleteOnTermination": "true", } ], - 'Tags': [ - { - 'Value': 'milton@initech.com', - 'Key': 'SupportEmail' - }, - { - 'Value': 'peter', - 'Key': 'CreatorName' - } + "Tags": [ + {"Value": "milton@initech.com", "Key": "SupportEmail"}, + {"Value": "peter", "Key": "CreatorName"}, ], - 'VolumeId': 'vol-21a0e7ea9b19f0043', - 'Size': 8, - 'State': "in-use" + "VolumeId": "vol-21a0e7ea9b19f0043", + "Size": 8, + "State": "in-use", } SQS_MESSAGE_1 = { - 'account': 'core-services-dev', - 'account_id': '000000000000', - 'region': 'us-east-1', - 'action': { - 'to': ['resource-owner', 'ldap_uid_tags'], - 'email_ldap_username_manager': True, - 'template': '', - 'priority_header': '1', - 'type': 'notify', - 'transport': {'queue': 'xxx', 'type': 'sqs'}, - 'subject': '{{ account }} AWS EBS Volumes will be DELETED in 15 DAYS!' 
+ "account": "core-services-dev", + "account_id": "000000000000", + "region": "us-east-1", + "action": { + "to": ["resource-owner", "ldap_uid_tags"], + "email_ldap_username_manager": True, + "template": "", + "priority_header": "1", + "type": "notify", + "transport": {"queue": "xxx", "type": "sqs"}, + "subject": "{{ account }} AWS EBS Volumes will be DELETED in 15 DAYS!", }, - 'policy': { - 'filters': [{'Attachments': []}, {'tag:maid_status': 'absent'}], - 'resource': 'ebs', - 'actions': [ + "policy": { + "filters": [{"Attachments": []}, {"tag:maid_status": "absent"}], + "resource": "ebs", + "actions": [ + {"type": "mark-for-op", "days": 15, "op": "delete"}, { - 'type': 'mark-for-op', - 'days': 15, - 'op': 'delete' + "to": ["resource-owner", "ldap_uid_tags"], + "email_ldap_username_manager": True, + "template": "", + "priority_header": "1", + "type": "notify", + "subject": "EBS Volumes will be DELETED in 15 DAYS!", }, - { - 'to': ['resource-owner', 'ldap_uid_tags'], - 'email_ldap_username_manager': True, - 'template': '', - 'priority_header': '1', - 'type': 'notify', - 'subject': 'EBS Volumes will be DELETED in 15 DAYS!' - } ], - 'comments': 'We are deleting your EBS volumes.', - 'name': 'ebs-mark-unattached-deletion' + "comments": "We are deleting your EBS volumes.", + "name": "ebs-mark-unattached-deletion", }, - 'event': None, - 'resources': [RESOURCE_1] + "event": None, + "resources": [RESOURCE_1], } SQS_MESSAGE_2 = { - 'account': 'core-services-dev', - 'account_id': '000000000000', - 'region': 'us-east-1', - 'action': { - 'type': 'notify', - 'to': ['datadog://?metric_name=EBS_volume.available.size'] - }, - 'policy': { - 'filters': [{'Attachments': []}, {'tag:maid_status': 'absent'}], - 'resource': 'ebs', - 'actions': [ - { - 'type': 'mark-for-op', - 'days': 15, - 'op': 'delete' - }, - { - 'type': 'notify', - 'to': ['datadog://?metric_name=EBS_volume.available.size'] - } + "account": "core-services-dev", + "account_id": "000000000000", + "region": "us-east-1", + "action": {"type": "notify", "to": ["datadog://?metric_name=EBS_volume.available.size"]}, + "policy": { + "filters": [{"Attachments": []}, {"tag:maid_status": "absent"}], + "resource": "ebs", + "actions": [ + {"type": "mark-for-op", "days": 15, "op": "delete"}, + {"type": "notify", "to": ["datadog://?metric_name=EBS_volume.available.size"]}, ], - 'comments': 'We are deleting your EBS volumes.', - 'name': 'ebs-mark-unattached-deletion' + "comments": "We are deleting your EBS volumes.", + "name": "ebs-mark-unattached-deletion", }, - 'event': None, - 'resources': [RESOURCE_1, RESOURCE_2] + "event": None, + "resources": [RESOURCE_1, RESOURCE_2], } SQS_MESSAGE_3 = { - 'account': 'core-services-dev', - 'account_id': '000000000000', - 'region': 'us-east-1', - 'action': { - 'type': 'notify', - 'to': ['datadog://?metric_name=EBS_volume.available.size&metric_value_tag=Size'] + "account": "core-services-dev", + "account_id": "000000000000", + "region": "us-east-1", + "action": { + "type": "notify", + "to": ["datadog://?metric_name=EBS_volume.available.size&metric_value_tag=Size"], }, - 'policy': { - 'filters': [{'Attachments': []}, {'tag:maid_status': 'absent'}], - 'resource': 'ebs', - 'actions': [ + "policy": { + "filters": [{"Attachments": []}, {"tag:maid_status": "absent"}], + "resource": "ebs", + "actions": [ + {"type": "mark-for-op", "days": 15, "op": "delete"}, { - 'type': 'mark-for-op', - 'days': 15, - 'op': 'delete' + "type": "notify", + "to": ["datadog://?metric_name=EBS_volume.available.size&metric_value_tag=Size"], }, - { - 
'type': 'notify', - 'to': ['datadog://?metric_name=EBS_volume.available.size&metric_value_tag=Size'] - } ], - 'comments': 'We are deleting your EBS volumes.', - 'name': 'ebs-mark-unattached-deletion' + "comments": "We are deleting your EBS volumes.", + "name": "ebs-mark-unattached-deletion", }, - 'event': None, - 'resources': [RESOURCE_2] + "event": None, + "resources": [RESOURCE_2], } SQS_MESSAGE_4 = { - 'account': 'core-services-dev', - 'account_id': '000000000000', - 'region': 'us-east-1', - 'action': { - 'to': ['resource-owner', 'ldap_uid_tags'], - 'cc': ['hello@example.com', 'cc@example.com'], - 'email_ldap_username_manager': True, - 'template': 'default.html', - 'priority_header': '1', - 'type': 'notify', - 'transport': {'queue': 'xxx', 'type': 'sqs'}, - 'subject': '{{ account }} AWS EBS Volumes will be DELETED in 15 DAYS!' + "account": "core-services-dev", + "account_id": "000000000000", + "region": "us-east-1", + "action": { + "to": ["resource-owner", "ldap_uid_tags"], + "cc": ["hello@example.com", "cc@example.com"], + "email_ldap_username_manager": True, + "template": "default.html", + "priority_header": "1", + "type": "notify", + "transport": {"queue": "xxx", "type": "sqs"}, + "subject": "{{ account }} AWS EBS Volumes will be DELETED in 15 DAYS!", }, - 'policy': { - 'filters': [{'Attachments': []}, {'tag:maid_status': 'absent'}], - 'resource': 'ebs', - 'actions': [ + "policy": { + "filters": [{"Attachments": []}, {"tag:maid_status": "absent"}], + "resource": "ebs", + "actions": [ + {"type": "mark-for-op", "days": 15, "op": "delete"}, { - 'type': 'mark-for-op', - 'days': 15, - 'op': 'delete' + "to": ["resource-owner", "ldap_uid_tags"], + "cc": ["hello@example.com", "cc@example.com"], + "email_ldap_username_manager": True, + "template": "default.html.j2", + "priority_header": "1", + "type": "notify", + "subject": "EBS Volumes will be DELETED in 15 DAYS!", }, - { - 'to': ['resource-owner', 'ldap_uid_tags'], - 'cc': ['hello@example.com', 'cc@example.com'], - 'email_ldap_username_manager': True, - 'template': 'default.html.j2', - 'priority_header': '1', - 'type': 'notify', - 'subject': 'EBS Volumes will be DELETED in 15 DAYS!' - } ], - 'comments': 'We are deleting your EBS volumes.', - 'name': 'ebs-mark-unattached-deletion' + "comments": "We are deleting your EBS volumes.", + "name": "ebs-mark-unattached-deletion", }, - 'event': None, - 'resources': [RESOURCE_1] + "event": None, + "resources": [RESOURCE_1], } SQS_MESSAGE_5 = { - 'account': 'core-services-dev', - 'account_id': '000000000000', - 'region': 'us-east-1', - 'action': { - 'to': ['slack://#test-channel'], - 'template': 'default.html', - 'type': 'notify', - 'transport': {'queue': 'xxx', 'type': 'sqs'}, - 'subject': '{{ account }} AWS EBS Volumes will be DELETED in 15 DAYS!' 
+ "account": "core-services-dev", + "account_id": "000000000000", + "region": "us-east-1", + "action": { + "to": ["slack://#test-channel"], + "template": "default.html", + "type": "notify", + "transport": {"queue": "xxx", "type": "sqs"}, + "subject": "{{ account }} AWS EBS Volumes will be DELETED in 15 DAYS!", }, - 'policy': { - 'filters': [{'Attachments': []}, {'tag:maid_status': 'absent'}], - 'resource': 'ebs', - 'actions': [ + "policy": { + "filters": [{"Attachments": []}, {"tag:maid_status": "absent"}], + "resource": "ebs", + "actions": [ + {"type": "mark-for-op", "days": 15, "op": "delete"}, { - 'type': 'mark-for-op', - 'days': 15, - 'op': 'delete' + "to": ["slack://tag/SlackChannel"], + "template": "slack_default.j2", + "type": "notify", + "subject": "EBS Volumes will be DELETED in 15 DAYS!", }, - { - 'to': ['slack://tag/SlackChannel'], - 'template': 'slack_default.j2', - 'type': 'notify', - 'subject': 'EBS Volumes will be DELETED in 15 DAYS!' - } ], - 'comments': 'We are deleting your EBS volumes.', - 'name': 'ebs-mark-unattached-deletion' + "comments": "We are deleting your EBS volumes.", + "name": "ebs-mark-unattached-deletion", }, - 'event': None, - 'resources': [RESOURCE_3] + "event": None, + "resources": [RESOURCE_3], } +GCP_MESSAGES = { + "receivedMessages": [ + { + "ackId": "TgQhIT4wPkVTRFAGFixdRkhRNxkIaFEOT14jPzUgKEURCAgUBXx9cURLd" + "V9bGgdRDRlyfGckOFgUBwtC" + "UXZVWxENem1cVzhUCRB1eWF8algbAwVAVH53_pGKmvCVOR1tNcH7qrdASszD_492Zho9XxJLLD5-Ki1F" + "QV5AEkwhGERJUytDCypYEQ", + "message": { + "data": "eJzVUrtuwzAM3PUVhuY6GQNk6tStX1AUgULTrgqZFCQqgBHk36tHHm6nolsHDbrDHe9EnZXGE" + "5LofUfJuSelDQAnkoMdMqZhR/2AJ/0gfqABJ8tUQONcATw7C0sGzkqTmbFQglF6YrHj0jSRU4BKTeA3Ph" + "1jOvbC3kLhR+sEQ8z0m1q5+MCfCBK31/HbKqjXw9VcXdR7jSo51N1AFl8NHgkEZ++MVHTA0SQnBc4" + "pyoRbZEtT1zRdc6xSrrY6RQzPA8/G0gZ41nWwBEPRc5DW/za4FWzq0vHXZXIddbkX+m76D9usdv+X" + "7WZ5vu1fjcAHDi+rX9JcymOV8wVn/efe", + "messageId": "549740902827570", + "publishTime": "2019-05-13T18:31:17.926Z", + }, + } + ] +} -ASQ_MESSAGE = '''{ +GCP_MESSAGE = """{ + "account": "c7n-dev", + "account_id": "c7n-dev", + "action": { + "subject": "testing notify action", + "template": "default", + "to": ["user@domain.com"], + "transport": { + "topic": "projects/c7n-dev/topics/c7n_notify", + "type": "pubsub"}, + "type": "notify"}, + "event": null, + "policy": { + "actions": [{ + "subject": "testing notify action", + "template": "default", + "to": ["user@domain.com"], + "transport": { + "topic": "projects/c7n-dev/topics/c7n_notify", + "type": "pubsub"}, + "type": "notify"}], + "filters": [{ + "name": "projects/c7n-dev/topics/c7n_notify"}], + "name": "test-notify", + "resource": "gcp.pubsub-topic"}, + "region": "all", + "resources": [{ + "c7n:MatchedFilters": ["name"], + "name": "projects/c7n-dev/topics/c7n_notify"}]}""" + +ASQ_MESSAGE = """{ "account":"subscription", "account_id":"ee98974b-5d2a-4d98-a78a-382f3715d07e", "region":"all", @@ -339,9 +362,9 @@ "id":"/subscriptions/ee98974b-5d2a-4d98-a78a-382f3715d07e/resourceGroups/test_keyvault/providers/Microsoft.KeyVault/vaults/cckeyvault1" } ] -}''' +}""" -ASQ_MESSAGE_TAG = '''{ +ASQ_MESSAGE_TAG = """{ "account":"subscription", "account_id":"ee98974b-5d2a-4d98-a78a-382f3715d07e", "region":"all", @@ -390,10 +413,10 @@ "id":"/subscriptions/ee98974b-5d2a-4d98-a78a-382f3715d07e/resourceGroups/test_keyvault/providers/Microsoft.KeyVault/vaults/cckeyvault1" } ] -}''' +}""" -ASQ_MESSAGE_SLACK = '''{ +ASQ_MESSAGE_SLACK = """{ "account":"subscription", "account_id":"ee98974b-5d2a-4d98-a78a-382f3715d07e", "region":"all", @@ 
-442,9 +465,9 @@ "id":"/subscriptions/ee98974b-5d2a-4d98-a78a-382f3715d07e/resourceGroups/test_keyvault/providers/Microsoft.KeyVault/vaults/cckeyvault1" } ] -}''' +}""" -ASQ_MESSAGE_DATADOG = '''{ +ASQ_MESSAGE_DATADOG = """{ "account":"subscription", "account_id":"ee98974b-5d2a-4d98-a78a-382f3715d07e", "region":"all", @@ -493,9 +516,9 @@ "id":"/subscriptions/ee98974b-5d2a-4d98-a78a-382f3715d07e/resourceGroups/test_keyvault/providers/Microsoft.KeyVault/vaults/cckeyvault1" } ] -}''' +}""" -ASQ_MESSAGE_MULTIPLE_ADDRS = '''{ +ASQ_MESSAGE_MULTIPLE_ADDRS = """{ "account":"subscription", "account_id":"ee98974b-5d2a-4d98-a78a-382f3715d07e", "region":"all", @@ -546,7 +569,98 @@ "id":"/subscriptions/ee98974b-5d2a-4d98-a78a-382f3715d07e/resourceGroups/test_keyvault/providers/Microsoft.KeyVault/vaults/cckeyvault1" } ] -}''' +}""" + +PUBSUB_MESSAGE_SLACK = """{ + "account": "c7n-dev", + "account_id": "c7n-dev", + "action": { + "subject": "testing notify action", + "template": "default", + "to": ["user@domain.com"], + "transport": { + "topic": "projects/c7n-dev/topics/c7n_notify", + "type": "pubsub"}, + "type": "notify"}, + "event": null, + "policy": { + "actions": [{ + "subject": "testing notify action", + "template": "default", + "to": ["slack://#test-channel"], + "transport": { + "topic": "projects/c7n-dev/topics/c7n_notify", + "type": "pubsub"}, + "type": "notify"}], + "filters": [{ + "name": "projects/c7n-dev/topics/c7n_notify"}], + "name": "test-notify", + "resource": "gcp.pubsub-topic"}, + "region": "all", + "resources": [{ + "c7n:MatchedFilters": ["name"], + "name": "projects/c7n-dev/topics/c7n_notify"}]}""" + +GCP_SMTP_MESSAGE = { + "account": "c7n-dev", + "account_id": "c7n-dev", + "action": { + "subject": "testing notify action", + "template": "default", + "to": ["user@domain.com"], + "transport": {"topic": "projects/c7n-dev/topics/c7n_notify", "type": "pubsub"}, + "type": "notify", + }, + "event": None, + "policy": { + "actions": [ + { + "subject": "testing notify action", + "template": "default", + "to": ["resource-owner", "ldap_uid_tags"], + "email_ldap_username_manager": True, + "transport": {"topic": "projects/c7n-dev/topics/c7n_notify", "type": "pubsub"}, + "type": "notify", + } + ], + "filters": [{"name": "projects/c7n-dev/topics/c7n_notify"}], + "name": "test-notify", + "resource": "gcp.pubsub-topic", + }, + "region": "all", + "resources": [{"c7n:MatchedFilters": ["name"], "name": "projects/c7n-dev/topics/c7n_notify"}], +} + +PUBSUB_MESSAGE_DATADOG = """{ + "account": "c7n-dev", + "account_id": "c7n-dev", + "action": { + "subject": "testing notify action", + "template": "default", + "to": ["datadog://?metric_name=gcp.disk.available.size"], + "transport": { + "topic": "projects/c7n-dev/topics/c7n_notify", + "type": "pubsub"}, + "type": "notify"}, + "event": null, + "policy": { + "actions": [{ + "subject": "testing notify action", + "template": "default", + "to": ["slack://#test-channel"], + "transport": { + "topic": "projects/c7n-dev/topics/c7n_notify", + "type": "pubsub"}, + "type": "notify"}], + "filters": [{ + "name": "projects/c7n-dev/topics/c7n_notify"}], + "name": "test-notify", + "resource": "gcp.pubsub-topic"}, + "region": "all", + "resources": [{ + "c7n:MatchedFilters": ["name"], + "name": "projects/c7n-dev/topics/c7n_notify"}]}""" + # Monkey-patch ldap3 to work around a bytes/text handling bug. 
_safe_rdn = mockBase.safe_rdn @@ -560,11 +674,8 @@ def safe_rdn(*a, **kw): def get_fake_ldap_connection(): - server = Server('my_fake_server') - connection = Connection( - server, - client_strategy=MOCK_SYNC - ) + server = Server("my_fake_server") + connection = Connection(server, client_strategy=MOCK_SYNC) connection.bind() connection.strategy.add_entry(PETER[0], PETER[1]) connection.strategy.add_entry(BILL[0], BILL[1]) @@ -572,45 +683,39 @@ def get_fake_ldap_connection(): def get_ldap_lookup(cache_engine=None, uid_regex=None): - if cache_engine == 'sqlite': - config = { - 'cache_engine': 'sqlite', - 'ldap_cache_file': ':memory:' - } - elif cache_engine == 'redis': - config = { - 'cache_engine': 'redis', - 'redis_host': 'localhost' - } + if cache_engine == "sqlite": + config = {"cache_engine": "sqlite", "ldap_cache_file": ":memory:"} + elif cache_engine == "redis": + config = {"cache_engine": "redis", "redis_host": "localhost"} if uid_regex: - config['ldap_uid_regex'] = uid_regex + config["ldap_uid_regex"] = uid_regex ldap_lookup = MockLdapLookup(config, logger) michael_bolton = { - 'dn': 'CN=Michael Bolton,cn=users,dc=initech,dc=com', - 'mail': 'michael_bolton@initech.com', - 'manager': 'CN=Milton,cn=users,dc=initech,dc=com', - 'displayName': 'Michael Bolton' + "dn": "CN=Michael Bolton,cn=users,dc=initech,dc=com", + "mail": "michael_bolton@initech.com", + "manager": "CN=Milton,cn=users,dc=initech,dc=com", + "displayName": "Michael Bolton", } milton = { - 'uid': '123456', - 'dn': 'CN=Milton,cn=users,dc=initech,dc=com', - 'mail': 'milton@initech.com', - 'manager': 'CN=cthulhu,cn=users,dc=initech,dc=com', - 'displayName': 'Milton' + "uid": "123456", + "dn": "CN=Milton,cn=users,dc=initech,dc=com", + "mail": "milton@initech.com", + "manager": "CN=cthulhu,cn=users,dc=initech,dc=com", + "displayName": "Milton", } bob_porter = { - 'dn': 'CN=Bob Porter,cn=users,dc=initech,dc=com', - 'mail': 'bob_porter@initech.com', - 'manager': 'CN=Bob Slydell,cn=users,dc=initech,dc=com', - 'displayName': 'Bob Porter' + "dn": "CN=Bob Porter,cn=users,dc=initech,dc=com", + "mail": "bob_porter@initech.com", + "manager": "CN=Bob Slydell,cn=users,dc=initech,dc=com", + "displayName": "Bob Porter", } - ldap_lookup.base_dn = 'cn=users,dc=initech,dc=com' - ldap_lookup.uid_key = 'uid' - ldap_lookup.attributes.append('uid') - ldap_lookup.caching.set('michael_bolton', michael_bolton) - ldap_lookup.caching.set(bob_porter['dn'], bob_porter) - ldap_lookup.caching.set('123456', milton) - ldap_lookup.caching.set(milton['dn'], milton) + ldap_lookup.base_dn = "cn=users,dc=initech,dc=com" + ldap_lookup.uid_key = "uid" + ldap_lookup.attributes.append("uid") + ldap_lookup.caching.set("michael_bolton", michael_bolton) + ldap_lookup.caching.set(bob_porter["dn"], bob_porter) + ldap_lookup.caching.set("123456", milton) + ldap_lookup.caching.set(milton["dn"], milton) return ldap_lookup diff --git a/tools/c7n_mailer/tests/gcp/credentials.json b/tools/c7n_mailer/tests/gcp/credentials.json new file mode 100644 index 00000000000..8b35c53f6ec --- /dev/null +++ b/tools/c7n_mailer/tests/gcp/credentials.json @@ -0,0 +1,6 @@ +{ + "client_id": "7640860-6qr4p6.apps.googleusercontent.com", + "client_secret": "d-Q19q7MQmFpd7h", + "refresh_token": "1/YZ02c13W_orYsgbXCSOi5ms20E5x", + "type": "authorized_user" +} diff --git a/tools/c7n_mailer/tests/test_azure.py b/tools/c7n_mailer/tests/test_azure.py index 6e5e18f26d6..e7873c6fbc8 100644 --- a/tools/c7n_mailer/tests/test_azure.py +++ b/tools/c7n_mailer/tests/test_azure.py @@ -10,59 +10,71 @@ from 
c7n_azure.storage_utils import StorageUtilities from c7n_mailer.azure_mailer import deploy -from c7n_mailer.azure_mailer.azure_queue_processor import \ - MailerAzureQueueProcessor +from c7n_mailer.azure_mailer.azure_queue_processor import MailerAzureQueueProcessor from c7n_mailer.azure_mailer.sendgrid_delivery import SendGridDelivery -from common import (ASQ_MESSAGE, ASQ_MESSAGE_DATADOG, ASQ_MESSAGE_MULTIPLE_ADDRS, ASQ_MESSAGE_SLACK, - ASQ_MESSAGE_TAG, MAILER_CONFIG_AZURE, logger) +from common import ( + ASQ_MESSAGE, + ASQ_MESSAGE_DATADOG, + ASQ_MESSAGE_MULTIPLE_ADDRS, + ASQ_MESSAGE_SLACK, + ASQ_MESSAGE_TAG, + MAILER_CONFIG_AZURE, + logger, +) class AzureTest(unittest.TestCase): - def setUp(self): self.compressed_message = MagicMock() self.compressed_message.content = base64.b64encode( - zlib.compress(ASQ_MESSAGE.encode('utf8'))) + zlib.compress(ASQ_MESSAGE.encode("utf8")) + ) self.loaded_message = json.loads(ASQ_MESSAGE) self.tag_message = json.loads(ASQ_MESSAGE_TAG) self.multiple_addrs_message = json.loads(ASQ_MESSAGE_MULTIPLE_ADDRS) - @patch('c7n_mailer.azure_mailer.sendgrid_delivery.SendGridDelivery.sendgrid_handler') - @patch('c7n_mailer.azure_mailer.sendgrid_delivery.SendGridDelivery' - '.get_to_addrs_sendgrid_messages_map') + @patch("c7n_mailer.azure_mailer.sendgrid_delivery.SendGridDelivery.sendgrid_handler") + @patch( + "c7n_mailer.azure_mailer.sendgrid_delivery.SendGridDelivery" + ".get_to_addrs_sendgrid_messages_map" + ) def test_process_azure_queue_message_success(self, mock_get_addr, mock_handler): mock_handler.return_value = True mock_get_addr.return_value = 42 # Run the process messages method azure_processor = MailerAzureQueueProcessor(MAILER_CONFIG_AZURE, logger) - self.assertTrue(azure_processor.process_azure_queue_message(self.compressed_message)) + self.assertTrue( + azure_processor.process_azure_queue_message(self.compressed_message, "timestamp")) # Verify mock calls were correct mock_get_addr.assert_called_with(self.loaded_message) mock_handler.assert_called_with(self.loaded_message, 42) - @patch('c7n_mailer.azure_mailer.sendgrid_delivery.SendGridDelivery.sendgrid_handler') - @patch('c7n_mailer.azure_mailer.sendgrid_delivery.SendGridDelivery' - '.get_to_addrs_sendgrid_messages_map') + @patch("c7n_mailer.azure_mailer.sendgrid_delivery.SendGridDelivery.sendgrid_handler") + @patch( + "c7n_mailer.azure_mailer.sendgrid_delivery.SendGridDelivery" + ".get_to_addrs_sendgrid_messages_map" + ) def test_process_azure_queue_message_failure(self, mock_get_addr, mock_handler): mock_handler.return_value = False mock_get_addr.return_value = 42 # Run the process messages method azure_processor = MailerAzureQueueProcessor(MAILER_CONFIG_AZURE, logger) - self.assertFalse(azure_processor.process_azure_queue_message(self.compressed_message)) + self.assertFalse( + azure_processor.process_azure_queue_message(self.compressed_message, "timestamp")) # Verify mock calls were correct mock_get_addr.assert_called_with(self.loaded_message) mock_handler.assert_called_with(self.loaded_message, 42) - @patch.object(MailerAzureQueueProcessor, 'process_azure_queue_message') - @patch.object(StorageUtilities, 'get_queue_client_by_uri') - @patch.object(StorageUtilities, 'delete_queue_message') - @patch.object(StorageUtilities, 'get_queue_messages') + @patch.object(MailerAzureQueueProcessor, "process_azure_queue_message") + @patch.object(StorageUtilities, "get_queue_client_by_uri") + @patch.object(StorageUtilities, "delete_queue_message") + @patch.object(StorageUtilities, "get_queue_messages") def 
test_run(self, mock_get_messages, mock_delete, mock_client, mock_process): mock_get_messages.side_effect = [[self.compressed_message], []] mock_client.return_value = (None, None) @@ -76,74 +88,77 @@ def test_run(self, mock_get_messages, mock_delete, mock_client, mock_process): self.assertEqual(1, mock_process.call_count) mock_delete.assert_called() - @patch('sendgrid.SendGridAPIClient.send') + @patch("sendgrid.SendGridAPIClient.send") def test_sendgrid_handler(self, mock_send): sendgrid_delivery = SendGridDelivery(MAILER_CONFIG_AZURE, Mock(), logger) - sendgrid_messages = \ - sendgrid_delivery.get_to_addrs_sendgrid_messages_map(self.loaded_message) + sendgrid_messages = sendgrid_delivery.get_to_addrs_sendgrid_messages_map( + self.loaded_message + ) result = sendgrid_delivery.sendgrid_handler(self.loaded_message, sendgrid_messages) self.assertTrue(result) mock_send.assert_called_once() mail_contents = mock_send.call_args[0][0].contents[0].content - self.assertIn('The following azure.keyvault resources', mail_contents) + self.assertIn("The following azure.keyvault resources", mail_contents) - @patch('sendgrid.SendGridAPIClient.send') + @patch("sendgrid.SendGridAPIClient.send") def test_sendgrid_handler_multiple_to_addrs(self, mock_send): sendgrid_delivery = SendGridDelivery(MAILER_CONFIG_AZURE, Mock(), logger) - sendgrid_messages = \ - sendgrid_delivery.get_to_addrs_sendgrid_messages_map(self.multiple_addrs_message) + sendgrid_messages = sendgrid_delivery.get_to_addrs_sendgrid_messages_map( + self.multiple_addrs_message + ) result = sendgrid_delivery.sendgrid_handler(self.multiple_addrs_message, sendgrid_messages) self.assertTrue(result) self.assertEqual(2, mock_send.call_count) mail_contents = mock_send.call_args[0][0].contents[0].content - self.assertIn('The following azure.keyvault resources', mail_contents) + self.assertIn("The following azure.keyvault resources", mail_contents) - address_one = mock_send.call_args_list[0][0][0].personalizations[0].tos[0]['email'] + address_one = mock_send.call_args_list[0][0][0].personalizations[0].tos[0]["email"] self.assertEqual("user2@domain.com", address_one) - address_two = mock_send.call_args_list[1][0][0].personalizations[0].tos[0]['email'] + address_two = mock_send.call_args_list[1][0][0].personalizations[0].tos[0]["email"] self.assertEqual("user@domain.com", address_two) def test_azure_mailer_requirements(self): reqs = deploy.get_mailer_requirements() - self.assertIn('adal', reqs) - self.assertIn('azure-storage-blob', reqs) - self.assertIn('azure-storage-queue', reqs) - self.assertIn('azure-common', reqs) - self.assertIn('azure-mgmt-managementgroups', reqs) - self.assertIn('azure-mgmt-web', reqs) - self.assertIn('azure-graphrbac', reqs) - self.assertIn('msrestazure', reqs) - self.assertIn('jmespath', reqs) - self.assertIn('jinja2', reqs) - self.assertIn('sendgrid', reqs) - self.assertIn('ldap3', reqs) - self.assertIn('netaddr', reqs) - - @patch('c7n_mailer.azure_mailer.deploy.FunctionPackage') + self.assertIn("adal", reqs) + self.assertIn("azure-storage-blob", reqs) + self.assertIn("azure-storage-queue", reqs) + self.assertIn("azure-common", reqs) + self.assertIn("azure-mgmt-managementgroups", reqs) + self.assertIn("azure-mgmt-web", reqs) + self.assertIn("azure-graphrbac", reqs) + self.assertIn("msrestazure", reqs) + self.assertIn("jmespath", reqs) + self.assertIn("jinja2", reqs) + self.assertIn("sendgrid", reqs) + self.assertIn("ldap3", reqs) + self.assertIn("netaddr", reqs) + + @patch("c7n_mailer.azure_mailer.deploy.FunctionPackage") def 
test_build_function_package(self, package_mock): - deploy.build_function_package(MAILER_CONFIG_AZURE, "test_mailer", 'sub') + deploy.build_function_package(MAILER_CONFIG_AZURE, "test_mailer", "sub") package_mock.assert_called_with( - "test_mailer", - ANY, - target_sub_ids=['sub'], - cache_override_path=deploy.cache_path()) + "test_mailer", ANY, target_sub_ids=["sub"], cache_override_path=deploy.cache_path() + ) package_mock.return_value.pkg.add_contents.assert_any_call( - "test_mailer_sub/config.json", contents=ANY) + "test_mailer_sub/config.json", contents=ANY + ) package_mock.return_value.pkg.add_contents.assert_any_call( - "test_mailer_sub/function.json", contents=ANY) - - @patch('c7n_mailer.azure_mailer.deploy.build_function_package') - @patch('c7n_mailer.azure_mailer.deploy.FunctionAppUtilities') - @patch('c7n_mailer.azure_mailer.deploy.Session') - @patch('c7n_mailer.azure_mailer.deploy.local_session') - def test_provision_embedded_auth(self, mock_local_session, mock_session, - mock_func_utils, mock_build_pkg): - mock_session.get_subscription_id.return_value = 'mock-id' + "test_mailer_sub/function.json", contents=ANY + ) + + @patch("c7n_mailer.azure_mailer.deploy.build_function_package") + @patch("c7n_mailer.azure_mailer.deploy.FunctionAppUtilities") + @patch("c7n_mailer.azure_mailer.deploy.Session") + @patch("c7n_mailer.azure_mailer.deploy.local_session") + def test_provision_embedded_auth( + self, mock_local_session, mock_session, mock_func_utils, mock_build_pkg + ): + mock_session.get_subscription_id.return_value = "mock-id" mock_local_session.return_value = mock_session - mock_func_utils.get_function_name.return_value = 'mock-func-name' + mock_func_utils.get_function_name.return_value = "mock-func-name" mock_build_pkg.return_value = MagicMock() deploy.provision(MAILER_CONFIG_AZURE) @@ -153,25 +168,25 @@ def test_provision_embedded_auth(self, mock_local_session, mock_session, service_plan=ANY, storage_account=ANY, function_app={ - 'resource_group_name': 'cloud-custodian', - 'identity': { - 'type': 'Embedded' - }, - 'name': 'mock-func-name' - }) + "resource_group_name": "cloud-custodian", + "identity": {"type": "Embedded"}, + "name": "mock-func-name", + }, + ) mock_func_utils.deploy_function_app.assert_called_once() mock_func_utils.publish_functions_package.assert_called_once() - @patch('c7n_mailer.azure_mailer.deploy.build_function_package') - @patch('c7n_mailer.azure_mailer.deploy.FunctionAppUtilities') - @patch('c7n_mailer.azure_mailer.deploy.Session') - @patch('c7n_mailer.azure_mailer.deploy.local_session') - def test_provision_msi_auth(self, mock_local_session, mock_session, - mock_func_utils, mock_build_pkg): - mock_session.get_subscription_id.return_value = 'mock-id' + @patch("c7n_mailer.azure_mailer.deploy.build_function_package") + @patch("c7n_mailer.azure_mailer.deploy.FunctionAppUtilities") + @patch("c7n_mailer.azure_mailer.deploy.Session") + @patch("c7n_mailer.azure_mailer.deploy.local_session") + def test_provision_msi_auth( + self, mock_local_session, mock_session, mock_func_utils, mock_build_pkg + ): + mock_session.get_subscription_id.return_value = "mock-id" mock_local_session.return_value = mock_session - mock_func_utils.get_function_name.return_value = 'mock-func-name' + mock_func_utils.get_function_name.return_value = "mock-func-name" mock_build_pkg.return_value = MagicMock() system_assigned = {"identity": {"type": "SystemAssigned"}} @@ -183,32 +198,28 @@ def test_provision_msi_auth(self, mock_local_session, mock_session, service_plan=ANY, storage_account=ANY, 
function_app={ - 'resource_group_name': 'cloud-custodian', - 'identity': { - 'type': 'SystemAssigned' - }, - 'name': 'mock-func-name' - }) + "resource_group_name": "cloud-custodian", + "identity": {"type": "SystemAssigned"}, + "name": "mock-func-name", + }, + ) mock_func_utils.deploy_function_app.assert_called_once() mock_func_utils.publish_functions_package.assert_called_once() - @patch('c7n_mailer.azure_mailer.deploy.build_function_package') - @patch('c7n_mailer.azure_mailer.deploy.FunctionAppUtilities') - @patch('c7n_mailer.azure_mailer.deploy.Session') - @patch('c7n_mailer.azure_mailer.deploy.local_session') - def test_provision_uai_auth(self, mock_local_session, mock_session, - mock_func_utils, mock_build_pkg): - mock_session.get_subscription_id.return_value = 'mock-id' + @patch("c7n_mailer.azure_mailer.deploy.build_function_package") + @patch("c7n_mailer.azure_mailer.deploy.FunctionAppUtilities") + @patch("c7n_mailer.azure_mailer.deploy.Session") + @patch("c7n_mailer.azure_mailer.deploy.local_session") + def test_provision_uai_auth( + self, mock_local_session, mock_session, mock_func_utils, mock_build_pkg + ): + mock_session.get_subscription_id.return_value = "mock-id" mock_local_session.return_value = mock_session - mock_func_utils.get_function_name.return_value = 'mock-func-name' + mock_func_utils.get_function_name.return_value = "mock-func-name" mock_build_pkg.return_value = MagicMock() user_assigned = { - "identity": { - "type": "UserAssigned", - "id": "mock-id", - "client_id": "mock-client-id" - } + "identity": {"type": "UserAssigned", "id": "mock-id", "client_id": "mock-client-id"} } with patch.dict(MAILER_CONFIG_AZURE, {"function_properties": user_assigned}): @@ -219,77 +230,90 @@ def test_provision_uai_auth(self, mock_local_session, mock_session, service_plan=ANY, storage_account=ANY, function_app={ - 'resource_group_name': 'cloud-custodian', - 'identity': { - 'type': 'UserAssigned', - 'id': 'mock-id', - 'client_id': 'mock-client-id' + "resource_group_name": "cloud-custodian", + "identity": { + "type": "UserAssigned", + "id": "mock-id", + "client_id": "mock-client-id", }, - 'name': 'mock-func-name' - }) + "name": "mock-func-name", + }, + ) mock_func_utils.deploy_function_app.assert_called_once() mock_func_utils.publish_functions_package.assert_called_once() - @patch('c7n_mailer.azure_mailer.azure_queue_processor.SmtpDelivery') + @patch("c7n_mailer.azure_mailer.azure_queue_processor.SmtpDelivery") def test_smtp_delivery(self, mock_smtp): smtp_mailer_config = { - 'queue_url': 'asq://storageaccount.queue.core.windows.net/queuename', - 'from_address': 'you@youremail.com', - 'smtp_port': 25, - 'smtp_ssl': True, - 'smtp_server': 'test_server', - 'smtp_username': 'user', - 'smtp_password': 'password' + "queue_url": "asq://storageaccount.queue.core.windows.net/queuename", + "from_address": "you@youremail.com", + "smtp_port": 25, + "smtp_ssl": True, + "smtp_server": "test_server", + "smtp_username": "user", + "smtp_password": "password", } - with patch('c7n_mailer.azure_mailer.sendgrid_delivery.SendGridDelivery' - '.get_to_addrs_sendgrid_messages_map', - return_value={('mock@test.com',): self.loaded_message}): + with patch( + "c7n_mailer.azure_mailer.sendgrid_delivery.SendGridDelivery" + ".get_to_addrs_sendgrid_messages_map", + return_value={("mock@test.com",): self.loaded_message}, + ): azure_processor = MailerAzureQueueProcessor(smtp_mailer_config, logger) - self.assertTrue(azure_processor.process_azure_queue_message(self.compressed_message)) + self.assertTrue( + 
azure_processor.process_azure_queue_message(self.compressed_message, "timestamp")) mock_smtp.assert_has_calls( - [call().send_message(message=self.loaded_message, to_addrs=['mock@test.com'])]) + [call().send_message(message=self.loaded_message, to_addrs=["mock@test.com"])] + ) - @patch('c7n_mailer.slack_delivery.SlackDelivery') + @patch("c7n_mailer.slack_delivery.SlackDelivery") def test_slack_delivery(self, mock_slack): slack_mailer_config = { - 'queue_url': 'asq://storageaccount.queue.core.windows.net/queuename', - 'slack_token': 'mock_token' + "queue_url": "asq://storageaccount.queue.core.windows.net/queuename", + "slack_token": "mock_token", } slack_compressed_message = MagicMock() slack_compressed_message.content = base64.b64encode( - zlib.compress(ASQ_MESSAGE_SLACK.encode('utf8'))) + zlib.compress(ASQ_MESSAGE_SLACK.encode("utf8")) + ) slack_loaded_message = json.loads(ASQ_MESSAGE_SLACK) - mock_slack.return_value\ - .get_to_addrs_slack_messages_map.return_value = 'mock_slack_message_map' + mock_slack.return_value.get_to_addrs_slack_messages_map.return_value = ( + "mock_slack_message_map" + ) azure_processor = MailerAzureQueueProcessor(slack_mailer_config, logger) - self.assertTrue(azure_processor.process_azure_queue_message(slack_compressed_message)) + self.assertTrue( + azure_processor.process_azure_queue_message(slack_compressed_message, "timestamp")) mock_slack.assert_has_calls( - [call().slack_handler(slack_loaded_message, 'mock_slack_message_map')]) + [call().slack_handler(slack_loaded_message, "mock_slack_message_map")] + ) - @patch('c7n_mailer.datadog_delivery.DataDogDelivery') + @patch("c7n_mailer.datadog_delivery.DataDogDelivery") def test_datadog_delivery(self, mock_datadog): datadog_mailer_config = { - 'queue_url': 'asq://storageaccount.queue.core.windows.net/queuename', - 'datadog_api_key': 'mock_api_key', - 'datadog_application_key': 'mock_application_key' + "queue_url": "asq://storageaccount.queue.core.windows.net/queuename", + "datadog_api_key": "mock_api_key", + "datadog_application_key": "mock_application_key", } datadog_compressed_message = MagicMock() datadog_compressed_message.content = base64.b64encode( - zlib.compress(ASQ_MESSAGE_DATADOG.encode('utf8'))) + zlib.compress(ASQ_MESSAGE_DATADOG.encode("utf8")) + ) datadog_loaded_message = json.loads(ASQ_MESSAGE_DATADOG) - mock_datadog.return_value\ - .get_datadog_message_packages.return_value = 'mock_datadog_message_map' + mock_datadog.return_value.get_datadog_message_packages.return_value = ( + "mock_datadog_message_map" + ) azure_processor = MailerAzureQueueProcessor(datadog_mailer_config, logger) - self.assertTrue(azure_processor.process_azure_queue_message(datadog_compressed_message)) + self.assertTrue( + azure_processor.process_azure_queue_message(datadog_compressed_message, "timestamp")) mock_datadog.assert_has_calls( - [call().deliver_datadog_messages('mock_datadog_message_map', datadog_loaded_message)]) + [call().deliver_datadog_messages("mock_datadog_message_map", datadog_loaded_message)] + ) diff --git a/tools/c7n_mailer/tests/test_azure_mailer_utils.py b/tools/c7n_mailer/tests/test_azure_mailer_utils.py index 3004d662e9d..8fc4baadf2d 100644 --- a/tools/c7n_mailer/tests/test_azure_mailer_utils.py +++ b/tools/c7n_mailer/tests/test_azure_mailer_utils.py @@ -8,15 +8,14 @@ class AzureUtilsTest(unittest.TestCase): - def test_azure_decrypt_raw(self): - self.assertEqual(azure_decrypt({'test': 'value'}, Mock(), Mock(), 'test'), 'value') - self.assertEqual(azure_decrypt({'test': 'value'}, Mock(), Mock(), 'test'), 
'value') + self.assertEqual(azure_decrypt({"test": "value"}, Mock(), Mock(), "test"), "value") + self.assertEqual(azure_decrypt({"test": "value"}, Mock(), Mock(), "test"), "value") def test_azure_decrypt_secret(self): - config = {'test': {'secret': 'https://ccvault.vault.azure.net/secrets/password'}} + config = {"test": {"secret": "https://ccvault.vault.azure.net/secrets/password"}} session_mock = Mock() - session_mock.client().get_secret().value = 'value' + session_mock.client().get_secret().value = "value" session_mock.get_session_for_resource.return_value = session_mock - self.assertEqual(azure_decrypt(config, Mock(), session_mock, 'test'), 'value') + self.assertEqual(azure_decrypt(config, Mock(), session_mock, "test"), "value") diff --git a/tools/c7n_mailer/tests/test_datadog.py b/tools/c7n_mailer/tests/test_datadog.py index c426054eeb0..176df1631af 100644 --- a/tools/c7n_mailer/tests/test_datadog.py +++ b/tools/c7n_mailer/tests/test_datadog.py @@ -11,64 +11,70 @@ from c7n_mailer.datadog_delivery import DataDogDelivery -DATADOG_APPLICATION_KEY = 'datadog_application_key' -DATADOG_API_KEY = 'datadog_api_key' -MESSAGE_ANSWER = [[ - 'Attachments:[]', - 'AvailabilityZone:us-east-1a', - 'CreatorName:peter', - 'SupportEmail:milton@initech.com', - 'VolumeId:vol-01a0e6ea6b89f0099', - 'account:core-services-dev', - 'account_id:000000000000', - 'event:None', - 'region:us-east-1' -]] +DATADOG_APPLICATION_KEY = "datadog_application_key" +DATADOG_API_KEY = "datadog_api_key" +MESSAGE_ANSWER = [ + [ + "Attachments:[]", + "AvailabilityZone:us-east-1a", + "CreatorName:peter", + "SupportEmail:milton@initech.com", + "VolumeId:vol-01a0e6ea6b89f0099", + "account:core-services-dev", + "account_id:000000000000", + "event:None", + "region:us-east-1", + ] +] DATADOG_METRIC_SQS_MESSAGE_2 = [ { - 'metric': 'EBS_volume.available.size', - 'points': (0, 1), - 'tags': [ - 'Attachments:[]', - 'AvailabilityZone:us-east-1a', - 'CreatorName:peter', - 'SupportEmail:milton@initech.com', - 'VolumeId:vol-01a0e6ea6b89f0099', - 'account:core-services-dev', - 'account_id:000000000000', - 'event:None', - 'region:us-east-1'] - }, { - 'metric': 'EBS_volume.available.size', - 'points': (0, 1), - 'tags': [ - 'Attachments:[]', - 'AvailabilityZone:us-east-1c', - 'CreatorName:peter', - 'Size:8', - 'SupportEmail:milton@initech.com', - 'VolumeId:vol-21a0e7ea9b19f0043', - 'account:core-services-dev', - 'account_id:000000000000', - 'event:None', - 'region:us-east-1'] - } + "metric": "EBS_volume.available.size", + "points": (0, 1), + "tags": [ + "Attachments:[]", + "AvailabilityZone:us-east-1a", + "CreatorName:peter", + "SupportEmail:milton@initech.com", + "VolumeId:vol-01a0e6ea6b89f0099", + "account:core-services-dev", + "account_id:000000000000", + "event:None", + "region:us-east-1", + ], + }, + { + "metric": "EBS_volume.available.size", + "points": (0, 1), + "tags": [ + "Attachments:[]", + "AvailabilityZone:us-east-1c", + "CreatorName:peter", + "Size:8", + "SupportEmail:milton@initech.com", + "VolumeId:vol-21a0e7ea9b19f0043", + "account:core-services-dev", + "account_id:000000000000", + "event:None", + "region:us-east-1", + ], + }, ] DATADOG_METRIC_SQS_MESSAGE_3 = [ { - 'metric': 'EBS_volume.available.size', - 'points': (0, 8.0), - 'tags': [ - 'Attachments:[]', - 'AvailabilityZone:us-east-1c', - 'CreatorName:peter', - 'Size:8', - 'SupportEmail:milton@initech.com', - 'VolumeId:vol-21a0e7ea9b19f0043', - 'account:core-services-dev', - 'account_id:000000000000', - 'event:None', - 'region:us-east-1'] + "metric": "EBS_volume.available.size", 
+ "points": (0, 8.0), + "tags": [ + "Attachments:[]", + "AvailabilityZone:us-east-1c", + "CreatorName:peter", + "Size:8", + "SupportEmail:milton@initech.com", + "VolumeId:vol-21a0e7ea9b19f0043", + "account:core-services-dev", + "account_id:000000000000", + "event:None", + "region:us-east-1", + ], } ] @@ -76,13 +82,13 @@ class TestDataDogDelivery(unittest.TestCase): def setUp(self): self.config = { - 'datadog_application_key': DATADOG_APPLICATION_KEY, - 'datadog_api_key': DATADOG_API_KEY + "datadog_application_key": DATADOG_APPLICATION_KEY, + "datadog_api_key": DATADOG_API_KEY, } - self.session = patch('boto3.Session') + self.session = patch("boto3.Session") self.logger = MagicMock() - self.patcher_datadog_initialize = patch('c7n_mailer.datadog_delivery.initialize') + self.patcher_datadog_initialize = patch("c7n_mailer.datadog_delivery.initialize") self.mock_datadog_initialize = self.patcher_datadog_initialize.start() def tearDown(self): @@ -92,7 +98,8 @@ def test_should_initialize_datadog_with_keys_in_config(self): DataDogDelivery(self.config, self.session, self.logger) self.mock_datadog_initialize.assert_called_with( - api_key=DATADOG_API_KEY, app_key=DATADOG_APPLICATION_KEY) + api_key=DATADOG_API_KEY, app_key=DATADOG_APPLICATION_KEY + ) def test_should_not_initialize_datadog_with_no_keys_in_config(self): DataDogDelivery({}, self.session, self.logger) @@ -104,48 +111,51 @@ def test_datadog_message_packages_should_return_empty_list_if_no_sqs_messages_re assert data_dog_delivery.get_datadog_message_packages(None) == [] - @patch('c7n_mailer.datadog_delivery.time.time', return_value=0) + @patch("c7n_mailer.datadog_delivery.time.time", return_value=0) def test_datadog_message_packages_should_return_messages(self, mock_time): data_dog_delivery = DataDogDelivery(self.config, self.session, self.logger) answer = data_dog_delivery.get_datadog_message_packages(SQS_MESSAGE_2) - answer[0]['tags'].sort() - answer[1]['tags'].sort() + answer[0]["tags"].sort() + answer[1]["tags"].sort() assert len(answer) == 2 assert answer == DATADOG_METRIC_SQS_MESSAGE_2 - @patch('c7n_mailer.datadog_delivery.time.time', return_value=0) - @patch('c7n_mailer.datadog_delivery.api.Metric.send') + @patch("c7n_mailer.datadog_delivery.time.time", return_value=0) + @patch("c7n_mailer.datadog_delivery.api.Metric.send") def test_deliver_datadog_messages_should_send_correct_metric_to_datadog( - self, mock_datadog_api, mock_time): + self, mock_datadog_api, mock_time + ): datadog_delivery = DataDogDelivery(self.config, self.session, self.logger) datadog_message_packages = datadog_delivery.get_datadog_message_packages(SQS_MESSAGE_2) datadog_delivery.deliver_datadog_messages(datadog_message_packages, SQS_MESSAGE_2) answer = mock_datadog_api.mock_calls[0][1][0] - answer[0]['tags'].sort() - answer[1]['tags'].sort() + answer[0]["tags"].sort() + answer[1]["tags"].sort() assert answer == DATADOG_METRIC_SQS_MESSAGE_2 - @patch('c7n_mailer.datadog_delivery.time.time', return_value=0) - @patch('c7n_mailer.datadog_delivery.api.Metric.send') + @patch("c7n_mailer.datadog_delivery.time.time", return_value=0) + @patch("c7n_mailer.datadog_delivery.api.Metric.send") def test_deliver_datadog_messages_should_send_correct_metric_value_to_datadog( - self, mock_datadog_api, mock_time): + self, mock_datadog_api, mock_time + ): datadog_delivery = DataDogDelivery(self.config, self.session, self.logger) datadog_message_packages = datadog_delivery.get_datadog_message_packages(SQS_MESSAGE_3) datadog_delivery.deliver_datadog_messages(datadog_message_packages, 
SQS_MESSAGE_3) answer = mock_datadog_api.mock_calls[0][1][0] - answer[0]['tags'].sort() + answer[0]["tags"].sort() assert answer == DATADOG_METRIC_SQS_MESSAGE_3 - @patch('c7n_mailer.datadog_delivery.time.time', return_value=0) - @patch('c7n_mailer.datadog_delivery.api.Metric.send') + @patch("c7n_mailer.datadog_delivery.time.time", return_value=0) + @patch("c7n_mailer.datadog_delivery.api.Metric.send") def test_deliver_datadog_messages_should_not_send_metric_if_metrics_are_empty( - self, mock_datadog_api, mock_time): + self, mock_datadog_api, mock_time + ): datadog_delivery = DataDogDelivery(self.config, self.session, self.logger) datadog_delivery.deliver_datadog_messages([], SQS_MESSAGE_3) diff --git a/tools/c7n_mailer/tests/test_email.py b/tools/c7n_mailer/tests/test_email.py index 8582186e48a..d4d07b3fc34 100644 --- a/tools/c7n_mailer/tests/test_email.py +++ b/tools/c7n_mailer/tests/test_email.py @@ -7,8 +7,16 @@ import unittest from c7n_mailer.email_delivery import EmailDelivery -from common import logger, get_ldap_lookup -from common import MAILER_CONFIG, RESOURCE_1, SQS_MESSAGE_1, SQS_MESSAGE_4 +from common import ( + logger, + get_ldap_lookup, + GCP_SMTP_MESSAGE, + MAILER_CONFIG, + MAILER_CONFIG_GCP, + RESOURCE_1, + SQS_MESSAGE_1, + SQS_MESSAGE_4, +) from mock import patch, call, MagicMock from c7n_mailer.utils_email import is_email, priority_header_is_valid, get_mimetext_message @@ -16,14 +24,14 @@ # note principalId is very org/domain specific for federated?, it would be good to get # confirmation from capone on this event / test. CLOUDTRAIL_EVENT = { - 'detail': { - 'userIdentity': { + "detail": { + "userIdentity": { "type": "IAMUser", "principalId": "AIDAJ45Q7YFFAREXAMPLE", "arn": "arn:aws:iam::123456789012:user/michael_bolton", "accountId": "123456789012", "accessKeyId": "AKIAIOSFODNN7EXAMPLE", - "userName": "michael_bolton" + "userName": "michael_bolton", } } } @@ -31,84 +39,96 @@ class MockEmailDelivery(EmailDelivery): def get_ldap_connection(self): - return get_ldap_lookup(cache_engine='redis') + return get_ldap_lookup(cache_engine="redis") class EmailTest(unittest.TestCase): - def setUp(self): self.aws_session = boto3.Session() self.email_delivery = MockEmailDelivery(MAILER_CONFIG, self.aws_session, logger) - self.email_delivery.ldap_lookup.uid_regex = '' - template_abs_filename = os.path.join(os.path.abspath(os.path.dirname(__file__)), - 'example.jinja') + self.email_delivery.ldap_lookup.uid_regex = "" + template_abs_filename = os.path.join( + os.path.abspath(os.path.dirname(__file__)), "example.jinja" + ) # Jinja paths must always be forward slashes regardless of operating system - template_abs_filename = template_abs_filename.replace('\\', '/') + template_abs_filename = template_abs_filename.replace("\\", "/") - SQS_MESSAGE_1['action']['template'] = template_abs_filename - SQS_MESSAGE_4['action']['template'] = template_abs_filename + SQS_MESSAGE_1["action"]["template"] = template_abs_filename + SQS_MESSAGE_4["action"]["template"] = template_abs_filename def test_valid_email(self): - self.assertFalse(is_email('foobar')) - self.assertFalse(is_email('foo@bar')) - self.assertFalse(is_email('slack://foo@bar.com')) - self.assertTrue(is_email('foo@bar.com')) + self.assertFalse(is_email("foobar")) + self.assertFalse(is_email("foo@bar")) + self.assertFalse(is_email("slack://foo@bar.com")) + self.assertTrue(is_email("foo@bar.com")) def test_smtp_creds(self): conf = dict(MAILER_CONFIG) - conf['smtp_username'] = 'alice' - conf['smtp_password'] = 'bob' + conf["smtp_username"] = 
"alice" + conf["smtp_password"] = "bob" msg = dict(SQS_MESSAGE_1) deliver = MockEmailDelivery(conf, self.aws_session, logger) messages_map = deliver.get_to_addrs_email_messages_map(msg) with patch("smtplib.SMTP") as mock_smtp: - with patch('c7n_mailer.utils.kms_decrypt') as mock_decrypt: - mock_decrypt.return_value = 'xyz' + with patch("c7n_mailer.utils.kms_decrypt") as mock_decrypt: + mock_decrypt.return_value = "xyz" for email_addrs, mimetext_msg in messages_map.items(): deliver.send_c7n_email(msg, list(email_addrs), mimetext_msg) mock_decrypt.assert_called_once() - mock_smtp.assert_has_calls([call().login('alice', 'xyz')]) + mock_smtp.assert_has_calls([call().login("alice", "xyz")]) + + def test_kms_not_called_for_gcp(self): + conf = dict(MAILER_CONFIG_GCP) + conf["smtp_username"] = "alice" + conf["smtp_password"] = "bob" + + msg = dict(GCP_SMTP_MESSAGE) + deliver = MockEmailDelivery(conf, self.aws_session, logger) + messages_map = deliver.get_to_addrs_email_messages_map(msg) + + with patch("smtplib.SMTP") as mock_smtp: + with patch("c7n_mailer.utils.kms_decrypt") as mock_decrypt: + # mock_decrypt.return_value = "xyz" + print(messages_map) + for email_addrs, mimetext_msg in messages_map.items(): + deliver.send_c7n_email(msg, list(email_addrs), mimetext_msg) + mock_decrypt.assert_not_called() + mock_smtp.assert_has_calls([call().login("alice", "bob")]) def test_priority_header_is_valid(self): - self.assertFalse(priority_header_is_valid('0', self.email_delivery.logger)) - self.assertFalse(priority_header_is_valid('-1', self.email_delivery.logger)) - self.assertFalse(priority_header_is_valid('6', self.email_delivery.logger)) - self.assertFalse(priority_header_is_valid('sd', self.email_delivery.logger)) - self.assertTrue(priority_header_is_valid('1', self.email_delivery.logger)) - self.assertTrue(priority_header_is_valid('5', self.email_delivery.logger)) + self.assertFalse(priority_header_is_valid("0", self.email_delivery.logger)) + self.assertFalse(priority_header_is_valid("-1", self.email_delivery.logger)) + self.assertFalse(priority_header_is_valid("6", self.email_delivery.logger)) + self.assertFalse(priority_header_is_valid("sd", self.email_delivery.logger)) + self.assertTrue(priority_header_is_valid("1", self.email_delivery.logger)) + self.assertTrue(priority_header_is_valid("5", self.email_delivery.logger)) def test_get_valid_emails_from_list(self): list_1 = [ - 'michael_bolton@initech.com', - 'lsdk', - 'resource-owner', - 'event-owner', - 'bill@initech.com' + "michael_bolton@initech.com", + "lsdk", + "resource-owner", + "event-owner", + "bill@initech.com", ] valid_emails = self.email_delivery.get_valid_emails_from_list(list_1) - self.assertEqual(valid_emails, ['michael_bolton@initech.com', 'bill@initech.com']) + self.assertEqual(valid_emails, ["michael_bolton@initech.com", "bill@initech.com"]) def test_event_owner_ldap_flow(self): - targets = ['event-owner'] + targets = ["event-owner"] michael_bolton_email = self.email_delivery.get_event_owner_email(targets, CLOUDTRAIL_EVENT) - self.assertEqual(michael_bolton_email, ['michael_bolton@initech.com']) + self.assertEqual(michael_bolton_email, ["michael_bolton@initech.com"]) def test_get_ldap_emails_from_resource(self): - SQS_MESSAGE_1['action']['email_ldap_username_manager'] = False - ldap_emails = self.email_delivery.get_ldap_emails_from_resource( - SQS_MESSAGE_1, - RESOURCE_1 - ) - self.assertEqual(ldap_emails, ['peter@initech.com']) - SQS_MESSAGE_1['action']['email_ldap_username_manager'] = True - ldap_emails = 
self.email_delivery.get_ldap_emails_from_resource( - SQS_MESSAGE_1, - RESOURCE_1 - ) - self.assertEqual(ldap_emails, ['peter@initech.com', 'bill_lumberg@initech.com']) + SQS_MESSAGE_1["action"]["email_ldap_username_manager"] = False + ldap_emails = self.email_delivery.get_ldap_emails_from_resource(SQS_MESSAGE_1, RESOURCE_1) + self.assertEqual(ldap_emails, ["peter@initech.com"]) + SQS_MESSAGE_1["action"]["email_ldap_username_manager"] = True + ldap_emails = self.email_delivery.get_ldap_emails_from_resource(SQS_MESSAGE_1, RESOURCE_1) + self.assertEqual(ldap_emails, ["peter@initech.com", "bill_lumberg@initech.com"]) def test_email_to_resources_map_with_ldap_manager(self): emails_to_resources_map = self.email_delivery.get_email_to_addrs_to_resources_map( @@ -116,37 +136,32 @@ def test_email_to_resources_map_with_ldap_manager(self): ) # make sure only 1 email is queued to go out self.assertEqual(len(emails_to_resources_map.items()), 1) - to_emails = ('bill_lumberg@initech.com', 'milton@initech.com', 'peter@initech.com') + to_emails = ("bill_lumberg@initech.com", "milton@initech.com", "peter@initech.com") self.assertEqual(emails_to_resources_map, {to_emails: [RESOURCE_1]}) def test_email_to_email_message_map_without_ldap_manager(self): SQS_MESSAGE = copy.deepcopy(SQS_MESSAGE_1) - SQS_MESSAGE['policy']['actions'][1].pop('email_ldap_username_manager', None) + SQS_MESSAGE["policy"]["actions"][1].pop("email_ldap_username_manager", None) email_addrs_to_email_message_map = self.email_delivery.get_to_addrs_email_messages_map( SQS_MESSAGE ) - to_emails = ('bill_lumberg@initech.com', 'milton@initech.com', 'peter@initech.com') + to_emails = ("bill_lumberg@initech.com", "milton@initech.com", "peter@initech.com") items = list(email_addrs_to_email_message_map.items()) self.assertEqual(items[0][0], to_emails) - self.assertEqual(items[0][1]['to'], ', '.join(to_emails)) + self.assertEqual(items[0][1]["to"], ", ".join(to_emails)) def test_email_to_email_message_map_additional_headers(self): conf = dict(MAILER_CONFIG) - conf['additional_email_headers'] = { - 'X-Foo': 'X-Foo-Value', - 'X-Bar': '1234' - } - email_delivery = MockEmailDelivery( - conf, self.aws_session, logger - ) + conf["additional_email_headers"] = {"X-Foo": "X-Foo-Value", "X-Bar": "1234"} + email_delivery = MockEmailDelivery(conf, self.aws_session, logger) SQS_MESSAGE = copy.deepcopy(SQS_MESSAGE_1) - SQS_MESSAGE['policy']['actions'][1].pop('email_ldap_username_manager', None) + SQS_MESSAGE["policy"]["actions"][1].pop("email_ldap_username_manager", None) email_addrs_to_email_message_map = email_delivery.get_to_addrs_email_messages_map( SQS_MESSAGE ) for _, mimetext_msg in email_addrs_to_email_message_map.items(): - self.assertEqual(mimetext_msg['X-Foo'], 'X-Foo-Value') - self.assertEqual(mimetext_msg['X-Bar'], '1234') + self.assertEqual(mimetext_msg["X-Foo"], "X-Foo-Value") + self.assertEqual(mimetext_msg["X-Bar"], "1234") def test_smtp_called_once(self): SQS_MESSAGE = copy.deepcopy(SQS_MESSAGE_1) @@ -157,7 +172,7 @@ def test_smtp_called_once(self): for email_addrs, mimetext_msg in to_addrs_to_email_messages_map.items(): self.email_delivery.send_c7n_email(SQS_MESSAGE, list(email_addrs), mimetext_msg) - self.assertEqual(mimetext_msg['X-Priority'], '1 (Highest)') + self.assertEqual(mimetext_msg["X-Priority"], "1 (Highest)") # Get instance of mocked SMTP object smtp_instance = mock_smtp.return_value # Checks the mock has been called at least one time @@ -166,34 +181,29 @@ def test_smtp_called_once(self): 
self.assertEqual(smtp_instance.sendmail.call_count, 1) # Check the mock' calls are equal to a specific list of calls in a # specific order - to_addrs = ['bill_lumberg@initech.com', 'milton@initech.com', 'peter@initech.com'] + to_addrs = ["bill_lumberg@initech.com", "milton@initech.com", "peter@initech.com"] self.assertEqual( smtp_instance.sendmail.mock_calls, - [call(MAILER_CONFIG['from_address'], to_addrs, mimetext_msg.as_string())] + [call(MAILER_CONFIG["from_address"], to_addrs, mimetext_msg.as_string())], ) def test_smtp_called_multiple_times(self): SQS_MESSAGE = copy.deepcopy(SQS_MESSAGE_1) - SQS_MESSAGE['action'].pop('priority_header', None) + SQS_MESSAGE["action"].pop("priority_header", None) RESOURCE_2 = { - 'AvailabilityZone': 'us-east-1a', - 'Attachments': [], - 'Tags': [ - { - 'Value': 'samir@initech.com', - 'Key': 'SupportEmail' - } - ], - 'VolumeId': 'vol-01a0e6ea6b8lsdkj93' + "AvailabilityZone": "us-east-1a", + "Attachments": [], + "Tags": [{"Value": "samir@initech.com", "Key": "SupportEmail"}], + "VolumeId": "vol-01a0e6ea6b8lsdkj93", } - SQS_MESSAGE['resources'].append(RESOURCE_2) + SQS_MESSAGE["resources"].append(RESOURCE_2) to_addrs_to_email_messages_map = self.email_delivery.get_to_addrs_email_messages_map( SQS_MESSAGE ) with patch("smtplib.SMTP") as mock_smtp: for email_addrs, mimetext_msg in to_addrs_to_email_messages_map.items(): self.email_delivery.send_c7n_email(SQS_MESSAGE, list(email_addrs), mimetext_msg) - self.assertEqual(mimetext_msg.get('X-Priority'), None) + self.assertEqual(mimetext_msg.get("X-Priority"), None) # self.assertEqual(mimetext_msg.get('X-Priority'), None) # Get instance of mocked SMTP object smtp_instance = mock_smtp.return_value @@ -204,52 +214,38 @@ def test_smtp_called_multiple_times(self): def test_emails_resource_mapping_multiples(self): SQS_MESSAGE = copy.deepcopy(SQS_MESSAGE_1) - SQS_MESSAGE['action'].pop('priority_header', None) + SQS_MESSAGE["action"].pop("priority_header", None) RESOURCE_2 = { - 'AvailabilityZone': 'us-east-1a', - 'Attachments': [], - 'Tags': [ - { - 'Value': 'samir@initech.com', - 'Key': 'SupportEmail' - } - ], - 'VolumeId': 'vol-01a0e6ea6b8lsdkj93' + "AvailabilityZone": "us-east-1a", + "Attachments": [], + "Tags": [{"Value": "samir@initech.com", "Key": "SupportEmail"}], + "VolumeId": "vol-01a0e6ea6b8lsdkj93", } - SQS_MESSAGE['resources'].append(RESOURCE_2) + SQS_MESSAGE["resources"].append(RESOURCE_2) emails_to_resources_map = self.email_delivery.get_email_to_addrs_to_resources_map( SQS_MESSAGE ) - email_1_to_addrs = ('bill_lumberg@initech.com', 'milton@initech.com', 'peter@initech.com') - email_2_to_addrs = ('samir@initech.com',) + email_1_to_addrs = ("bill_lumberg@initech.com", "milton@initech.com", "peter@initech.com") + email_2_to_addrs = ("samir@initech.com",) self.assertEqual(emails_to_resources_map[email_1_to_addrs], [RESOURCE_1]) self.assertEqual(emails_to_resources_map[email_2_to_addrs], [RESOURCE_2]) def test_emails_resource_mapping_no_owner(self): SQS_MESSAGE = copy.deepcopy(SQS_MESSAGE_1) - SQS_MESSAGE['action'].pop('priority_header', None) - SQS_MESSAGE['action']['owner_absent_contact'] = ['foo@example.com'] + SQS_MESSAGE["action"].pop("priority_header", None) + SQS_MESSAGE["action"]["owner_absent_contact"] = ["foo@example.com"] RESOURCE_2 = { - 'AvailabilityZone': 'us-east-1a', - 'Attachments': [], - 'Tags': [ - { - 'Value': 'peter', - 'Key': 'CreatorName' - } - ], - 'VolumeId': 'vol-01a0e6ea6b89f0099' + "AvailabilityZone": "us-east-1a", + "Attachments": [], + "Tags": [{"Value": "peter", "Key": 
"CreatorName"}], + "VolumeId": "vol-01a0e6ea6b89f0099", } - SQS_MESSAGE['resources'] = [RESOURCE_2] + SQS_MESSAGE["resources"] = [RESOURCE_2] emails_to_resources_map = self.email_delivery.get_email_to_addrs_to_resources_map( SQS_MESSAGE ) - email_1_to_addrs = ( - 'bill_lumberg@initech.com', 'foo@example.com', 'peter@initech.com' - ) - self.assertEqual( - emails_to_resources_map[email_1_to_addrs], [RESOURCE_2] - ) + email_1_to_addrs = ("bill_lumberg@initech.com", "foo@example.com", "peter@initech.com") + self.assertEqual(emails_to_resources_map[email_1_to_addrs], [RESOURCE_2]) def test_no_mapping_if_no_valid_emails(self): SQS_MESSAGE = copy.deepcopy(SQS_MESSAGE_1) @@ -262,62 +258,51 @@ def test_no_mapping_if_no_valid_emails(self): def test_flattened_list_get_resource_owner_emails_from_resource(self): RESOURCE_2 = { - 'AvailabilityZone': 'us-east-1a', - 'Attachments': [], - 'Tags': [ - { - 'Value': '123456', - 'Key': 'OwnerEmail' - } - ], - 'VolumeId': 'vol-01a0e6ea6b8lsdkj93' + "AvailabilityZone": "us-east-1a", + "Attachments": [], + "Tags": [{"Value": "123456", "Key": "OwnerEmail"}], + "VolumeId": "vol-01a0e6ea6b8lsdkj93", } RESOURCE_3 = { - 'AvailabilityZone': 'us-east-1a', - 'Attachments': [], - 'Tags': [ - { - 'Value': 'milton@initech.com', - 'Key': 'OwnerEmail' - } - ], - 'VolumeId': 'vol-01a0e6ea6b8lsdkj93' + "AvailabilityZone": "us-east-1a", + "Attachments": [], + "Tags": [{"Value": "milton@initech.com", "Key": "OwnerEmail"}], + "VolumeId": "vol-01a0e6ea6b8lsdkj93", } ldap_emails = self.email_delivery.get_resource_owner_emails_from_resource( - SQS_MESSAGE_1, - RESOURCE_2 + SQS_MESSAGE_1, RESOURCE_2 ) - self.assertEqual(ldap_emails, ['milton@initech.com']) + self.assertEqual(ldap_emails, ["milton@initech.com"]) ldap_emails = self.email_delivery.get_resource_owner_emails_from_resource( - SQS_MESSAGE_1, - RESOURCE_3 + SQS_MESSAGE_1, RESOURCE_3 ) - self.assertEqual(ldap_emails, ['milton@initech.com']) + self.assertEqual(ldap_emails, ["milton@initech.com"]) def test_get_resource_owner_emails_from_resource_org_domain_not_invoked(self): config = copy.deepcopy(MAILER_CONFIG) logger_mock = MagicMock() # Enable org_domain - config['org_domain'] = "test.com" + config["org_domain"] = "test.com" # Add "CreatorName" to contact tags to avoid creating a new # resource. - config['contact_tags'].append('CreatorName') + config["contact_tags"].append("CreatorName") self.email_delivery = MockEmailDelivery(config, self.aws_session, logger_mock) org_emails = self.email_delivery.get_resource_owner_emails_from_resource( - SQS_MESSAGE_1, - RESOURCE_1 + SQS_MESSAGE_1, RESOURCE_1 ) - assert org_emails == ['milton@initech.com', 'peter@initech.com'] - assert call("Using org_domain to reconstruct email addresses from contact_tags values") \ + assert org_emails == ["milton@initech.com", "peter@initech.com"] + assert ( + call("Using org_domain to reconstruct email addresses from contact_tags values") not in logger_mock.debug.call_args_list + ) def test_get_resource_owner_emails_from_resource_org_domain(self): config = copy.deepcopy(MAILER_CONFIG) @@ -325,41 +310,56 @@ def test_get_resource_owner_emails_from_resource_org_domain(self): # Enable org_domain and disable ldap lookups # If ldap lookups are enabled, org_domain logic is not invoked. - config['org_domain'] = "test.com" - del config['ldap_uri'] + config["org_domain"] = "test.com" + del config["ldap_uri"] # Add "CreatorName" to contact tags to avoid creating a new # resource. 
- config['contact_tags'].append('CreatorName') + config["contact_tags"].append("CreatorName") self.email_delivery = MockEmailDelivery(config, self.aws_session, logger_mock) org_emails = self.email_delivery.get_resource_owner_emails_from_resource( - SQS_MESSAGE_1, - RESOURCE_1 + SQS_MESSAGE_1, RESOURCE_1 ) - assert org_emails == ['milton@initech.com', 'peter@test.com'] + assert org_emails == ["milton@initech.com", "peter@test.com"] logger_mock.debug.assert_called_with( - "Using org_domain to reconstruct email addresses from contact_tags values") + "Using org_domain to reconstruct email addresses from contact_tags values" + ) def test_cc_email_functionality(self): email = get_mimetext_message( - self.email_delivery.config, self.email_delivery.logger, - SQS_MESSAGE_4, SQS_MESSAGE_4['resources'], ['hello@example.com']) - self.assertEqual(email['Cc'], 'hello@example.com, cc@example.com') + self.email_delivery.config, + self.email_delivery.logger, + SQS_MESSAGE_4, + SQS_MESSAGE_4["resources"], + ["hello@example.com"], + ) + self.assertEqual(email["Cc"], "hello@example.com, cc@example.com") def test_sendgrid(self): config = copy.deepcopy(MAILER_CONFIG) logger_mock = MagicMock() - config['sendgrid_api_key'] = 'SENDGRID_API_KEY' - del config['smtp_server'] + config["sendgrid_api_key"] = "SENDGRID_API_KEY" + del config["smtp_server"] delivery = MockEmailDelivery(config, self.aws_session, logger_mock) with patch("sendgrid.SendGridAPIClient.send") as mock_send: - with patch('c7n_mailer.utils.kms_decrypt') as mock_decrypt: - mock_decrypt.return_value = 'xyz' + with patch("c7n_mailer.utils.kms_decrypt") as mock_decrypt: + mock_decrypt.return_value = "xyz" delivery.send_c7n_email(SQS_MESSAGE_1, None, None) mock_decrypt.assert_called_once() mock_send.assert_called() + + def test_get_ldap_connection(self): + with patch("c7n_mailer.email_delivery.decrypt") as patched: + patched.return_value = "a password" + delivery = EmailDelivery( + {"ldap_uri": "foo"}, + self.aws_session, + MagicMock() + ) + patched.assert_called() + self.assertEqual(delivery.config['ldap_bind_password'], "a password") diff --git a/tools/c7n_mailer/tests/test_gcp.py b/tools/c7n_mailer/tests/test_gcp.py new file mode 100644 index 00000000000..33599a961a4 --- /dev/null +++ b/tools/c7n_mailer/tests/test_gcp.py @@ -0,0 +1,141 @@ +# Copyright The Cloud Custodian Authors. 
+# SPDX-License-Identifier: Apache-2.0 +import base64 +import json +import unittest +import zlib + +from common import ( + logger, + MAILER_CONFIG_GCP, + GCP_MESSAGE, + GCP_MESSAGES, + PUBSUB_MESSAGE_DATADOG, +) +from c7n_mailer.gcp_mailer.gcp_queue_processor import MailerGcpQueueProcessor +from c7n_mailer.email_delivery import EmailDelivery +from c7n_mailer.utils import get_provider +from mock import call, MagicMock, patch + + +class GcpTest(unittest.TestCase): + def setUp(self): + self.compressed_message = GCP_MESSAGE + self.loaded_message = json.loads(GCP_MESSAGE) + + def _pull_messages(self, count=0): + template = { + "message": { + "data": "", + "attributes": {}, + "messageId": "", + "orderingKey": "", + "publishTime": "a time" + }, + "ackId": "", + "deliveryAttempt": "" + } + result = [] + for i in range(count): + result.append(template) + return {"receivedMessages": result} + + @patch.object(EmailDelivery, "send_c7n_email") + def test_process_message(self, mock_email): + mock_email.return_value = True + processor = MailerGcpQueueProcessor(MAILER_CONFIG_GCP, logger) + self.assertTrue( + processor.process_message( + GCP_MESSAGES["receivedMessages"][0], + GCP_MESSAGES['receivedMessages'][0]['message']['publishTime'] + ) + ) + mock_email.assert_called() + + def test_receive_message(self): + patched_client = MagicMock() + patched_client.execute_command.return_value = self._pull_messages(1) + processor = MailerGcpQueueProcessor(MAILER_CONFIG_GCP, logger) + processor.client = patched_client + messages = processor.receive_messages() + self.assertEqual(len(messages['receivedMessages']), 1) + patched_client.execute_command.assert_called_with( + "pull", + { + "subscription": "projects/c7n-dev/subscriptions/getnotify", + "body": {"returnImmediately": True, "max_messages": 1000} + } + ) + + def test_ack_message(self): + patched_client = MagicMock() + patched_client.execute_command.return_value = {} + processor = MailerGcpQueueProcessor(MAILER_CONFIG_GCP, logger) + processor.client = patched_client + processor.ack_messages("2019-05-13T18:31:17.926Z") + patched_client.execute_command.assert_called_with( + "seek", + { + "subscription": "projects/c7n-dev/subscriptions/getnotify", + "body": {"time": "2019-05-13T18:31:17.926Z"} + } + ) + + @patch.object(MailerGcpQueueProcessor, "receive_messages") + def test_run_empty_receive(self, mock_receive): + mock_receive.return_value = self._pull_messages(0) + processor = MailerGcpQueueProcessor(MAILER_CONFIG_GCP, logger) + + processor.run() + + def test_is_gcp_cloud(self): + self.assertEqual(get_provider(MAILER_CONFIG_GCP), 2) + + @patch("common.logger.info") + @patch.object(MailerGcpQueueProcessor, "receive_messages") + def test_processor_run_logging(self, mock_receive, mock_log): + mock_receive.return_value = self._pull_messages(0) + processor = MailerGcpQueueProcessor(MAILER_CONFIG_GCP, logger) + processor.run() + mock_log.assert_called_with( + "No messages left in the gcp topic subscription," " now exiting c7n_mailer." 
+ ) + + @patch("c7n_mailer.datadog_delivery.DataDogDelivery") + def test_datadog_delivery(self, mock_datadog): + datadog_mailer_config = { + "queue_url": "projects/c7n-dev/subscriptions/getnotify", + "datadog_api_key": "mock_api_key", + "datadog_application_key": "mock_application_key", + } + + datadog_compressed_message = MagicMock() + datadog_compressed_message.content = base64.b64encode( + zlib.compress(PUBSUB_MESSAGE_DATADOG.encode("utf8")) + ) + datadog_loaded_message = json.loads(PUBSUB_MESSAGE_DATADOG) + + mock_datadog.return_value.get_datadog_message_packages.return_value = ( + "mock_datadog_message_map" + ) + + pubsub_message = {"message": {"data": datadog_compressed_message.content}} + gcp_processor = MailerGcpQueueProcessor(datadog_mailer_config, logger) + gcp_processor.process_message(pubsub_message, "a timestamp") + + mock_datadog.assert_has_calls( + [call().deliver_datadog_messages("mock_datadog_message_map", datadog_loaded_message)] + ) + + @patch.object(MailerGcpQueueProcessor, "ack_messages") + @patch.object(MailerGcpQueueProcessor, "process_message") + @patch.object(MailerGcpQueueProcessor, "receive_messages") + def test_gcp_queue_processor_run(self, mock_receive, mock_process_message, mock_ack_messages): + mock_receive.side_effect = [ + self._pull_messages(1), + self._pull_messages(0), + ] + processor = MailerGcpQueueProcessor(MAILER_CONFIG_GCP, logger) + processor.run() + mock_process_message.assert_called() + mock_ack_messages.assert_called() diff --git a/tools/c7n_mailer/tests/test_gcp_mailer_utils.py b/tools/c7n_mailer/tests/test_gcp_mailer_utils.py new file mode 100644 index 00000000000..6120688b867 --- /dev/null +++ b/tools/c7n_mailer/tests/test_gcp_mailer_utils.py @@ -0,0 +1,39 @@ +# Copyright The Cloud Custodian Authors. +# SPDX-License-Identifier: Apache-2.0 + +import unittest + +from c7n_mailer.gcp_mailer.utils import gcp_decrypt, CACHE +from mock import MagicMock + + +class GcpUtilsTest(unittest.TestCase): + def test_gcp_decrypt_raw(self): + self.assertEqual(gcp_decrypt({"test": "value"}, MagicMock(), "test", MagicMock()), "value") + + def test_gcp_decrypt_raw_latest(self): + mock_client = MagicMock() + mocked_response = MagicMock() + mocked_response.payload.data = b"secret value" + mock_client.access_secret_version.return_value = mocked_response + self.assertEqual( + gcp_decrypt( + {"test": {"secret": "foo"}}, + MagicMock(), + "test", + mock_client + ), + "secret value") + mock_client.access_secret_version.assert_called_with(name="foo/versions/latest") + self.assertTrue("foo/versions/latest" in CACHE) + # the value should be cached and we should only see one access secret version call + value = gcp_decrypt( + {"test": {"secret": "foo"}}, + MagicMock(), + "test", + mock_client + ) + mock_client.access_secret_version.assert_called_once() + + # of course, the value of the secret should not have changed + self.assertEqual(value, "secret value") diff --git a/tools/c7n_mailer/tests/test_ldap.py b/tools/c7n_mailer/tests/test_ldap.py index 6e3cd649f3f..6202bfc8f15 100644 --- a/tools/c7n_mailer/tests/test_ldap.py +++ b/tools/c7n_mailer/tests/test_ldap.py @@ -11,79 +11,78 @@ class MailerLdapTest(unittest.TestCase): - def setUp(self): if not have_sqlite: return - self.ldap_lookup = get_ldap_lookup(cache_engine='sqlite') + self.ldap_lookup = get_ldap_lookup(cache_engine="sqlite") def test_sqlite_cached_get_mail(self): - michael_bolton = self.ldap_lookup.caching.get('michael_bolton') - self.assertEqual(michael_bolton.get('mail'), 'michael_bolton@initech.com') + 
michael_bolton = self.ldap_lookup.caching.get("michael_bolton") + self.assertEqual(michael_bolton.get("mail"), "michael_bolton@initech.com") def test_regex_requiring_underscore(self): - self.ldap_lookup.uid_regex = '_' - michael_bolton = self.ldap_lookup.get_metadata_from_uid('michael_bolton') + self.ldap_lookup.uid_regex = "_" + michael_bolton = self.ldap_lookup.get_metadata_from_uid("michael_bolton") # since michael_bolton has an underscore, it should pass regex and return a result - self.assertEqual(michael_bolton.get('mail'), 'michael_bolton@initech.com') - milton = self.ldap_lookup.get_metadata_from_uid('123456') + self.assertEqual(michael_bolton.get("mail"), "michael_bolton@initech.com") + milton = self.ldap_lookup.get_metadata_from_uid("123456") # since '123456' doesn't have an underscore, it should return {} self.assertEqual(milton, {}) def test_sqlite_cache_set_escaping(self): irish_guy = { - 'dn': 'uid=john_oconnor,cn=users,dc=initech,dc=com', - 'mail': 'john_oconnor@initech.com', - 'manager': 'uid=bill_lumbergh,cn=users,dc=initech,dc=com', - 'displayName': "John O'Connor", - 'uid': 'john_oconnor' + "dn": "uid=john_oconnor,cn=users,dc=initech,dc=com", + "mail": "john_oconnor@initech.com", + "manager": "uid=bill_lumbergh,cn=users,dc=initech,dc=com", + "displayName": "John O'Connor", + "uid": "john_oconnor", } - set_result = self.ldap_lookup.caching.set(irish_guy['uid'], irish_guy) + set_result = self.ldap_lookup.caching.set(irish_guy["uid"], irish_guy) self.assertEqual(set_result, None) - get_result = self.ldap_lookup.caching.get(irish_guy['uid']) + get_result = self.ldap_lookup.caching.get(irish_guy["uid"]) self.assertEqual(get_result, irish_guy) def test_regex_requiring_6chars_and_only_digits(self): # now we'll do some tests requiring the uid to be 6 characters only and digits - self.ldap_lookup.uid_regex = '^[0-9]{6}$' - milton = self.ldap_lookup.get_metadata_from_uid('123456') - milton_email = milton.get('mail') - self.assertEqual(milton_email, 'milton@initech.com') + self.ldap_lookup.uid_regex = "^[0-9]{6}$" + milton = self.ldap_lookup.get_metadata_from_uid("123456") + milton_email = milton.get("mail") + self.assertEqual(milton_email, "milton@initech.com") def test_sqlite_cached_get_email_to_addr_without_manager(self): - to_addr = self.ldap_lookup.get_email_to_addrs_from_uid('michael_bolton') - self.assertEqual(to_addr, ['michael_bolton@initech.com']) + to_addr = self.ldap_lookup.get_email_to_addrs_from_uid("michael_bolton") + self.assertEqual(to_addr, ["michael_bolton@initech.com"]) def test_sqlite_cached_get_email_to_addrs_with_manager(self): - to_addr = self.ldap_lookup.get_email_to_addrs_from_uid('michael_bolton', manager=True) - self.assertEqual(to_addr, ['michael_bolton@initech.com', 'milton@initech.com']) + to_addr = self.ldap_lookup.get_email_to_addrs_from_uid("michael_bolton", manager=True) + self.assertEqual(to_addr, ["michael_bolton@initech.com", "milton@initech.com"]) def test_uid_ldap_lookup(self): - ldap_result = self.ldap_lookup.get_metadata_from_uid('peter') - self.assertEqual(ldap_result['mail'], PETER[1]['mail'][0]) - self.assertEqual(ldap_result['uid'], PETER[1]['uid'][0]) + ldap_result = self.ldap_lookup.get_metadata_from_uid("peter") + self.assertEqual(ldap_result["mail"], PETER[1]["mail"][0]) + self.assertEqual(ldap_result["uid"], PETER[1]["uid"][0]) # make sure it set a value in the cache as well. 
- cached_result = self.ldap_lookup.caching.get('peter') - self.assertEqual(cached_result['mail'], PETER[1]['mail'][0]) - self.assertEqual(cached_result['uid'], PETER[1]['uid'][0]) + cached_result = self.ldap_lookup.caching.get("peter") + self.assertEqual(cached_result["mail"], PETER[1]["mail"][0]) + self.assertEqual(cached_result["uid"], PETER[1]["uid"][0]) def test_dn_ldap_lookup(self): bill_metadata = self.ldap_lookup.get_metadata_from_dn(BILL[0]) - self.assertEqual(bill_metadata['mail'], BILL[1]['mail'][0]) + self.assertEqual(bill_metadata["mail"], BILL[1]["mail"][0]) def test_to_addr_with_ldap_query(self): - to_addr = self.ldap_lookup.get_email_to_addrs_from_uid('peter', manager=True) - self.assertEqual(to_addr, ['peter@initech.com', 'bill_lumberg@initech.com']) + to_addr = self.ldap_lookup.get_email_to_addrs_from_uid("peter", manager=True) + self.assertEqual(to_addr, ["peter@initech.com", "bill_lumberg@initech.com"]) def test_that_dn_and_uid_write_to_cache_on_manager_lookup(self): bill_metadata = self.ldap_lookup.get_metadata_from_dn(BILL[0]) bill_metadata_dn_lookup_cache = self.ldap_lookup.caching.get(BILL[0]) self.assertEqual(bill_metadata, bill_metadata_dn_lookup_cache) - bill_metadata_uid_lookup_cache = self.ldap_lookup.caching.get(BILL[1]['uid'][0]) + bill_metadata_uid_lookup_cache = self.ldap_lookup.caching.get(BILL[1]["uid"][0]) self.assertEqual(bill_metadata, bill_metadata_uid_lookup_cache) def test_that_dn_and_uid_write_to_cache_on_employee_lookup(self): - peter_uid = PETER[1]['uid'][0] + peter_uid = PETER[1]["uid"][0] peter_metadata = self.ldap_lookup.get_metadata_from_uid(peter_uid) peter_metadata_dn_lookup_cache = self.ldap_lookup.caching.get(PETER[0]) peter_metadata_uid_lookup_cache = self.ldap_lookup.caching.get(peter_uid) @@ -93,8 +92,8 @@ def test_that_dn_and_uid_write_to_cache_on_employee_lookup(self): def test_random_string_dont_hit_ldap_twice_uid_lookup(self): # if we query ldap and get no result, we should never query ldap again # for that result, we should query the cache and just return {} - to_addr = self.ldap_lookup.get_email_to_addrs_from_uid('doesnotexist', manager=True) + to_addr = self.ldap_lookup.get_email_to_addrs_from_uid("doesnotexist", manager=True) self.assertEqual(to_addr, []) self.ldap_lookup.connection = None - to_addr = self.ldap_lookup.get_email_to_addrs_from_uid('doesnotexist', manager=True) + to_addr = self.ldap_lookup.get_email_to_addrs_from_uid("doesnotexist", manager=True) self.assertEqual(to_addr, []) diff --git a/tools/c7n_mailer/tests/test_misc.py b/tools/c7n_mailer/tests/test_misc.py index d3f5736656a..3ae1a586888 100644 --- a/tools/c7n_mailer/tests/test_misc.py +++ b/tools/c7n_mailer/tests/test_misc.py @@ -11,43 +11,51 @@ from c7n_mailer import sqs_queue_processor from c7n_mailer import cli from c7n_mailer import deploy +from c7n_mailer.azure_mailer import azure_queue_processor +from c7n_mailer.gcp_mailer import gcp_queue_processor from c7n.mu import PythonPackageArchive -from common import MAILER_CONFIG +from common import MAILER_CONFIG, MAILER_CONFIG_GCP, MAILER_CONFIG_AZURE class AWSMailerTests(unittest.TestCase): - def test_replay_parser_creation(self): parser = replay.setup_parser() self.assertIs(parser.__class__, argparse.ArgumentParser) def test_mailer_handle(self): - handle.start_c7n_mailer(logging.getLogger('c7n_mailer'), MAILER_CONFIG, False) - http_proxy = 'username:password@my.proxy.com:80' - https_proxy = 'username:password@my.proxy.com:443' - MAILER_CONFIG['http_proxy'] = http_proxy - MAILER_CONFIG['https_proxy'] = 
https_proxy + handle.start_c7n_mailer(logging.getLogger("c7n_mailer"), MAILER_CONFIG, False) + http_proxy = "username:password@my.proxy.com:80" + https_proxy = "username:password@my.proxy.com:443" + MAILER_CONFIG["http_proxy"] = http_proxy + MAILER_CONFIG["https_proxy"] = https_proxy config = handle.config_setup(MAILER_CONFIG) self.assertEqual( - [ - config.get('http_proxy'), - config.get('https_proxy') - ], - [ - http_proxy, - https_proxy - ] + [config.get("http_proxy"), config.get("https_proxy")], [http_proxy, https_proxy] ) # Clear http proxy - MAILER_CONFIG['http_proxy'] = '' - MAILER_CONFIG['https_proxy'] = '' + MAILER_CONFIG["http_proxy"] = "" + MAILER_CONFIG["https_proxy"] = "" config = handle.config_setup(MAILER_CONFIG) def test_sqs_queue_processor(self): mailer_sqs_queue_processor = sqs_queue_processor.MailerSqsQueueProcessor( - MAILER_CONFIG, boto3.Session(), logging.getLogger('c7n_mailer')) - self.assertIs(mailer_sqs_queue_processor.__class__, - sqs_queue_processor.MailerSqsQueueProcessor) + MAILER_CONFIG, boto3.Session(), logging.getLogger("c7n_mailer") + ) + self.assertIs( + mailer_sqs_queue_processor.__class__, sqs_queue_processor.MailerSqsQueueProcessor + ) + + def test_azure_queue_processor(self): + processor = azure_queue_processor.MailerAzureQueueProcessor( + MAILER_CONFIG_AZURE, logging.getLogger("c7n_mailer") + ) + self.assertIs(processor.__class__, azure_queue_processor.MailerAzureQueueProcessor) + + def test_gcp_queue_processor(self): + processor = gcp_queue_processor.MailerGcpQueueProcessor( + MAILER_CONFIG_GCP, logging.getLogger("c7n_mailer") + ) + self.assertIs(processor.__class__, gcp_queue_processor.MailerGcpQueueProcessor) def test_cli_run(self): # Generate loggers and make sure they have the right class, for codecov @@ -56,16 +64,12 @@ def test_cli_run(self): parser = cli.get_c7n_mailer_parser() self.assertIs(parser.__class__, argparse.ArgumentParser) session = cli.session_factory(MAILER_CONFIG) - self.assertEqual( - [session.region_name, session.profile_name], - ['us-east-1', 'default'] - ) + self.assertEqual([session.region_name, session.profile_name], ["us-east-1", "default"]) class DeployTests(unittest.TestCase): - def test_get_archive(self): - archive = deploy.get_archive({'templates_folders': []}) + archive = deploy.get_archive({"templates_folders": []}) assert isinstance(archive, PythonPackageArchive) # basic sanity checks using random, low values assert archive.size > 10000 # this should really be about 1.5 MB diff --git a/tools/c7n_mailer/tests/test_schema.py b/tools/c7n_mailer/tests/test_schema.py index b46a5eab6f9..b3375a1967b 100644 --- a/tools/c7n_mailer/tests/test_schema.py +++ b/tools/c7n_mailer/tests/test_schema.py @@ -9,18 +9,17 @@ class MailerSchemaTest(unittest.TestCase): - def test_validate_secured_string(self): - property_schema = {'type': 'object', 'properties': {'test': cli.SECURED_STRING_SCHEMA}} - jsonschema.validate({'test': 'raw_string'}, property_schema) - jsonschema.validate({'test': {'type': 'azure.keyvault', - 'secret': 'https://secret_uri'}}, property_schema) + property_schema = {"type": "object", "properties": {"test": cli.SECURED_STRING_SCHEMA}} + jsonschema.validate({"test": "raw_string"}, property_schema) + jsonschema.validate( + {"test": {"type": "azure.keyvault", "secret": "https://secret_uri"}}, property_schema + ) with self.assertRaises(exceptions.ValidationError): - jsonschema.validate({'test': {'wrong': 'value'}}, - property_schema) - jsonschema.validate({'test': {'secret': 'https://secret_uri'}}, - property_schema) - 
jsonschema.validate({'test': {'type': 'azure.keyvault', - 'secret': 'https://secret_uri', 'extra': 'e'}}, - property_schema) + jsonschema.validate({"test": {"wrong": "value"}}, property_schema) + jsonschema.validate({"test": {"secret": "https://secret_uri"}}, property_schema) + jsonschema.validate( + {"test": {"type": "azure.keyvault", "secret": "https://secret_uri", "extra": "e"}}, + property_schema, + ) diff --git a/tools/c7n_mailer/tests/test_slack.py b/tools/c7n_mailer/tests/test_slack.py index 4afe83dec23..fc0011f325d 100644 --- a/tools/c7n_mailer/tests/test_slack.py +++ b/tools/c7n_mailer/tests/test_slack.py @@ -18,12 +18,12 @@ class TestSlackDelivery(unittest.TestCase): def setUp(self): self.config = { - 'slack_token': SLACK_TOKEN, - 'templates_folders': [ + "slack_token": SLACK_TOKEN, + "templates_folders": [ os.path.abspath(os.path.dirname(__file__)), - os.path.abspath('/'), - os.path.join(os.path.abspath(os.path.dirname(__file__)), "test-templates/") - ] + os.path.abspath("/"), + os.path.join(os.path.abspath(os.path.dirname(__file__)), "test-templates/"), + ], } self.session = MagicMock() @@ -32,54 +32,54 @@ def setUp(self): self.email_delivery = EmailDelivery(self.config, self.session, self.logger) self.message = copy.deepcopy(SQS_MESSAGE_5) self.resource = copy.deepcopy(RESOURCE_3) - self.message['resources'] = [self.resource] - self.target_channel = 'test-channel' + self.message["resources"] = [self.resource] + self.target_channel = "test-channel" def test_map_sending_to_channel(self): slack = SlackDelivery(self.config, self.logger, self.email_delivery) result = slack.get_to_addrs_slack_messages_map(self.message) assert self.target_channel in result - assert json.loads(result[self.target_channel])['channel'] == self.target_channel + assert json.loads(result[self.target_channel])["channel"] == self.target_channel def test_map_sending_to_tag_channel_with_hash(self): - self.target_channel = '#tag-channel' + self.target_channel = "#tag-channel" slack = SlackDelivery(self.config, self.logger, self.email_delivery) - message_destination = ['slack://tag/SlackChannel'] + message_destination = ["slack://tag/SlackChannel"] - self.resource['Tags'].append({"Key": "SlackChannel", "Value": self.target_channel}) - self.message['action']['to'] = message_destination - self.message['policy']['actions'][1]['to'] = message_destination + self.resource["Tags"].append({"Key": "SlackChannel", "Value": self.target_channel}) + self.message["action"]["to"] = message_destination + self.message["policy"]["actions"][1]["to"] = message_destination result = slack.get_to_addrs_slack_messages_map(self.message) assert self.target_channel in result - assert json.loads(result[self.target_channel])['channel'] == self.target_channel + assert json.loads(result[self.target_channel])["channel"] == self.target_channel self.logger.debug.assert_called_with("Generating message for specified Slack channel.") def test_map_sending_to_tag_channel_without_hash(self): - self.target_channel = 'tag-channel' + self.target_channel = "tag-channel" channel_name = "#" + self.target_channel slack = SlackDelivery(self.config, self.logger, self.email_delivery) - message_destination = ['slack://tag/SlackChannel'] + message_destination = ["slack://tag/SlackChannel"] - self.resource['Tags'].append({"Key": "SlackChannel", "Value": self.target_channel}) - self.message['action']['to'] = message_destination - self.message['policy']['actions'][1]['to'] = message_destination + self.resource["Tags"].append({"Key": "SlackChannel", "Value": 
self.target_channel}) + self.message["action"]["to"] = message_destination + self.message["policy"]["actions"][1]["to"] = message_destination result = slack.get_to_addrs_slack_messages_map(self.message) assert channel_name in result - assert json.loads(result[channel_name])['channel'] == channel_name + assert json.loads(result[channel_name])["channel"] == channel_name self.logger.debug.assert_called_with("Generating message for specified Slack channel.") def test_map_sending_to_tag_channel_no_tag(self): slack = SlackDelivery(self.config, self.logger, self.email_delivery) - message_destination = ['slack://tag/SlackChannel'] - self.message['action']['to'] = message_destination - self.message['policy']['actions'][1]['to'] = message_destination + message_destination = ["slack://tag/SlackChannel"] + self.message["action"]["to"] = message_destination + self.message["policy"]["actions"][1]["to"] = message_destination result = slack.get_to_addrs_slack_messages_map(self.message) @@ -92,75 +92,78 @@ def test_map_sending_to_webhook(self): slack = SlackDelivery(self.config, self.logger, self.email_delivery) message_destination = [webhook] - self.message['action']['to'] = message_destination - self.message['policy']['actions'][1]['to'] = message_destination + self.message["action"]["to"] = message_destination + self.message["policy"]["actions"][1]["to"] = message_destination result = slack.get_to_addrs_slack_messages_map(self.message) assert webhook in result - assert 'channel' not in json.loads(result[webhook]) + assert "channel" not in json.loads(result[webhook]) - @patch('c7n_mailer.slack_delivery.requests.post') + @patch("c7n_mailer.slack_delivery.requests.post") def test_slack_handler(self, mock_post): mock_post.return_value.status_code = 200 - mock_post.return_value.json.return_value = {'ok': True} + mock_post.return_value.json.return_value = {"ok": True} slack = SlackDelivery(self.config, self.logger, self.email_delivery) result = slack.get_to_addrs_slack_messages_map(self.message) slack.slack_handler(self.message, result) - self.logger.info.assert_called_with("Sending account:core-services-dev " - "policy:ebs-mark-unattached-deletion ebs:1 slack:slack" - "_default to test-channel") + self.logger.info.assert_called_with( + "Sending account:core-services-dev " + "policy:ebs-mark-unattached-deletion ebs:1 slack:slack" + "_default to test-channel" + ) - @patch('c7n_mailer.slack_delivery.requests.post') + @patch("c7n_mailer.slack_delivery.requests.post") def test_send_slack_msg_webhook(self, mock_post): mock_post.return_value.status_code = 200 - mock_post.return_value.json.return_value = {'ok': True} + mock_post.return_value.json.return_value = {"ok": True} webhook = "https://hooks.slack.com/this-is-a-webhook" message_destination = [webhook] - self.message['action']['to'] = message_destination - self.message['policy']['actions'][1]['to'] = message_destination + self.message["action"]["to"] = message_destination + self.message["policy"]["actions"][1]["to"] = message_destination slack = SlackDelivery(self.config, self.logger, self.email_delivery) result = slack.get_to_addrs_slack_messages_map(self.message) slack.send_slack_msg(webhook, result[webhook]) args, kwargs = mock_post.call_args - assert webhook == kwargs['url'] - assert kwargs['data'] == result[webhook] + assert webhook == kwargs["url"] + assert kwargs["data"] == result[webhook] - @patch('c7n_mailer.slack_delivery.requests.post') + @patch("c7n_mailer.slack_delivery.requests.post") def test_send_slack_msg(self, mock_post): 
mock_post.return_value.status_code = 200 - mock_post.return_value.json.return_value = {'ok': True} + mock_post.return_value.json.return_value = {"ok": True} slack = SlackDelivery(self.config, self.logger, self.email_delivery) result = slack.get_to_addrs_slack_messages_map(self.message) slack.send_slack_msg(self.target_channel, result[self.target_channel]) args, kwargs = mock_post.call_args - assert self.target_channel == json.loads(kwargs['data'])['channel'] - assert SLACK_POST_MESSAGE_API == kwargs['url'] - assert kwargs['data'] == result[self.target_channel] + assert self.target_channel == json.loads(kwargs["data"])["channel"] + assert SLACK_POST_MESSAGE_API == kwargs["url"] + assert kwargs["data"] == result[self.target_channel] - @patch('c7n_mailer.slack_delivery.requests.post') + @patch("c7n_mailer.slack_delivery.requests.post") def test_send_slack_msg_retry_after(self, mock_post): retry_after_delay = 1 mock_post.return_value.status_code = 429 - mock_post.return_value.headers = {'Retry-After': retry_after_delay} + mock_post.return_value.headers = {"Retry-After": retry_after_delay} slack = SlackDelivery(self.config, self.logger, self.email_delivery) result = slack.get_to_addrs_slack_messages_map(self.message) slack.send_slack_msg(self.target_channel, result[self.target_channel]) args, kwargs = mock_post.call_args - self.logger.info.assert_called_with("Slack API rate limiting. Waiting %d seconds", - retry_after_delay) + self.logger.info.assert_called_with( + "Slack API rate limiting. Waiting %d seconds", retry_after_delay + ) - @patch('c7n_mailer.slack_delivery.requests.post') + @patch("c7n_mailer.slack_delivery.requests.post") def test_send_slack_msg_not_200_response(self, mock_post): mock_post.return_value.status_code = 404 mock_post.return_value.text = "channel_not_found" @@ -169,17 +172,19 @@ def test_send_slack_msg_not_200_response(self, mock_post): result = slack.get_to_addrs_slack_messages_map(self.message) slack.send_slack_msg(self.target_channel, result[self.target_channel]) - self.logger.info.assert_called_with('Error in sending Slack message status:%s response: %s', - 404, 'channel_not_found') + self.logger.info.assert_called_with( + "Error in sending Slack message status:%s response: %s", 404, "channel_not_found" + ) - @patch('c7n_mailer.slack_delivery.requests.post') + @patch("c7n_mailer.slack_delivery.requests.post") def test_send_slack_msg_not_ok_response(self, mock_post): mock_post.return_value.status_code = 200 - mock_post.return_value.json.return_value = {'ok': False, 'error': "failed"} + mock_post.return_value.json.return_value = {"ok": False, "error": "failed"} slack = SlackDelivery(self.config, self.logger, self.email_delivery) result = slack.get_to_addrs_slack_messages_map(self.message) slack.send_slack_msg(self.target_channel, result[self.target_channel]) - self.logger.info.assert_called_with('Error in sending Slack message. Status:%s, ' - 'response:%s', 200, 'failed') + self.logger.info.assert_called_with( + "Error in sending Slack message. 
Status:%s, " "response:%s", 200, "failed" + ) diff --git a/tools/c7n_mailer/tests/test_smtp_delivery.py b/tools/c7n_mailer/tests/test_smtp_delivery.py index e2c92cbf8f5..2af4a20afe3 100644 --- a/tools/c7n_mailer/tests/test_smtp_delivery.py +++ b/tools/c7n_mailer/tests/test_smtp_delivery.py @@ -4,97 +4,118 @@ import unittest +import smtplib + from c7n_mailer.smtp_delivery import SmtpDelivery from mock import patch, call, MagicMock class SmtpDeliveryTest(unittest.TestCase): - - @patch('smtplib.SMTP') + @patch("smtplib.SMTP") def test_no_ssl(self, mock_smtp): config = { - 'smtp_server': 'server', - 'smtp_port': 25, - 'smtp_ssl': False, - 'smtp_username': None, - 'smtp_password': None + "smtp_server": "server", + "smtp_port": 25, + "smtp_ssl": False, + "smtp_username": None, + "smtp_password": None, } d = SmtpDelivery(config, MagicMock(), MagicMock()) del d - mock_smtp.assert_has_calls([call('server', 25), - call().quit()]) + mock_smtp.assert_has_calls([call("server", 25), call().quit()]) - @patch('c7n_mailer.utils.decrypt', return_value='password') - @patch('smtplib.SMTP') + @patch("c7n_mailer.utils.decrypt", return_value="password") + @patch("smtplib.SMTP") def test_no_ssl_with_credentials(self, mock_smtp, decrypt_mock): config = { - 'smtp_server': 'server', - 'smtp_port': 25, - 'smtp_ssl': False, - 'smtp_username': 'username', - 'smtp_password': 'test' + "smtp_server": "server", + "smtp_port": 25, + "smtp_ssl": False, + "smtp_username": "username", + "smtp_password": "password", } d = SmtpDelivery(config, MagicMock(), MagicMock()) del d - mock_smtp.assert_has_calls([call('server', 25), - call().login('username', 'password'), - call().quit()]) + mock_smtp.assert_has_calls( + [call("server", 25), call().login("username", "password"), call().quit()] + ) - @patch('smtplib.SMTP') + @patch("smtplib.SMTP") def test_with_ssl(self, mock_smtp): config = { - 'smtp_server': 'server', - 'smtp_port': 25, - 'smtp_ssl': True, - 'smtp_username': None, - 'smtp_password': None + "smtp_server": "server", + "smtp_port": 25, + "smtp_ssl": True, + "smtp_username": None, + "smtp_password": None, } d = SmtpDelivery(config, MagicMock(), MagicMock()) del d - mock_smtp.assert_has_calls([call('server', 25), - call().starttls(), - call().ehlo(), - call().quit()]) + mock_smtp.assert_has_calls( + [call("server", 25), call().starttls(), call().ehlo(), call().quit()] + ) - @patch('c7n_mailer.utils.decrypt', return_value='password') - @patch('smtplib.SMTP') + @patch("c7n_mailer.utils.decrypt", return_value="password") + @patch("smtplib.SMTP") def test_with_ssl_and_credentials(self, mock_smtp, decrypt_mock): config = { - 'smtp_server': 'server', - 'smtp_port': 25, - 'smtp_ssl': True, - 'smtp_username': 'username', - 'smtp_password': 'test' + "smtp_server": "server", + "smtp_port": 25, + "smtp_ssl": True, + "smtp_username": "username", + "smtp_password": "password", } d = SmtpDelivery(config, MagicMock(), MagicMock()) del d - mock_smtp.assert_has_calls([call('server', 25), - call().starttls(), - call().ehlo(), - call().login('username', 'password'), - call().quit()]) + mock_smtp.assert_has_calls( + [ + call("server", 25), + call().starttls(), + call().ehlo(), + call().login("username", "password"), + call().quit(), + ] + ) - @patch('smtplib.SMTP') + @patch("smtplib.SMTP") def test_send_message(self, mock_smtp): config = { - 'smtp_server': 'server', - 'smtp_port': 25, - 'smtp_ssl': False, - 'smtp_username': None, - 'smtp_password': None + "smtp_server": "server", + "smtp_port": 25, + "smtp_ssl": False, + "smtp_username": None, 
+            "smtp_password": None,
         }
         d = SmtpDelivery(config, MagicMock(), MagicMock())
         message_mock = MagicMock()
-        message_mock.__getitem__.side_effect = lambda x: 't@test.com' if x == 'From' else None
-        message_mock.as_string.return_value = 'mock_text'
-        d.send_message(message_mock,
-                       ['test1@test.com'])
+        message_mock.__getitem__.side_effect = lambda x: "t@test.com" if x == "From" else None
+        message_mock.as_string.return_value = "mock_text"
+        d.send_message(message_mock, ["test1@test.com"])
         del d
-        mock_smtp.assert_has_calls([call('server', 25),
-                                    call().sendmail('t@test.com', ['test1@test.com'], 'mock_text'),
-                                    call().quit()])
+        mock_smtp.assert_has_calls(
+            [
+                call("server", 25),
+                call().sendmail("t@test.com", ["test1@test.com"], "mock_text"),
+                call().quit(),
+            ]
+        )
+
+    @patch("smtplib.SMTP")
+    def test_smtp_disconnected_del(self, mock_smtp):
+        # if the SMTP server is already disconnected we shouldn't raise an exception when
+        # deleting the connection
+        config = {
+            "smtp_server": "server",
+            "smtp_port": 25,
+            "smtp_ssl": False,
+            "smtp_username": None,
+            "smtp_password": None,
+        }
+        d = SmtpDelivery(config, MagicMock(), MagicMock())
+        d._smtp_connection.quit.side_effect = smtplib.SMTPServerDisconnected
+        del d
diff --git a/tools/c7n_mailer/tests/test_sns.py b/tools/c7n_mailer/tests/test_sns.py
index 4b52c52fff3..96fb43dd1c5 100644
--- a/tools/c7n_mailer/tests/test_sns.py
+++ b/tools/c7n_mailer/tests/test_sns.py
@@ -10,18 +10,17 @@
 
 
 class SnsTest(unittest.TestCase):
-
     def setUp(self):
         self.sns_delivery = SnsDelivery(MAILER_CONFIG, boto3.Session(), logger)
-        self.sns_topic_example = 'arn:aws:sns:us-east-1:172519456306:cloud-custodian'
+        self.sns_topic_example = "arn:aws:sns:us-east-1:172519456306:cloud-custodian"
 
     def test_target_is_sns(self):
-        self.assertEqual(self.sns_delivery.target_is_sns('lksdjl'), False)
-        self.assertEqual(self.sns_delivery.target_is_sns('baz@qux.bar'), False)
+        self.assertEqual(self.sns_delivery.target_is_sns("lksdjl"), False)
+        self.assertEqual(self.sns_delivery.target_is_sns("baz@qux.bar"), False)
         self.assertEqual(self.sns_delivery.target_is_sns(self.sns_topic_example), True)
 
     def test_get_valid_sns_from_list(self):
-        targets = ['resource-owner', 'milton@initech.com', self.sns_topic_example]
+        targets = ["resource-owner", "milton@initech.com", self.sns_topic_example]
         sns_list = self.sns_delivery.get_valid_sns_from_list(targets)
         self.assertEqual(sns_list, [self.sns_topic_example])
 
diff --git a/tools/c7n_mailer/tests/test_splunk.py b/tools/c7n_mailer/tests/test_splunk.py
index 9951742a90f..4e34999db46 100644
--- a/tools/c7n_mailer/tests/test_splunk.py
+++ b/tools/c7n_mailer/tests/test_splunk.py
@@ -10,29 +10,23 @@
 
 from c7n_mailer.splunk_delivery import SplunkHecDelivery
 
-pbm = 'c7n_mailer.splunk_delivery'
-pb = '%s.SplunkHecDelivery' % pbm
+pbm = "c7n_mailer.splunk_delivery"
+pb = "%s.SplunkHecDelivery" % pbm
 
 
 class DeliveryTester:
-
     def setup(self):
         self.mock_sess = Mock()
         self.mock_logger = Mock(spec_set=Logger)
         self.config = {
-            'splunk_index': 'my_index_name',
-            'splunk_url': 'https://splunk.url/foo',
-            'splunk_token': 'stoken'
+            "splunk_index": "my_index_name",
+            "splunk_url": "https://splunk.url/foo",
+            "splunk_token": "stoken",
         }
-        self.cls = SplunkHecDelivery(
-            self.config,
-            self.mock_sess,
-            self.mock_logger
-        )
+        self.cls = SplunkHecDelivery(self.config, self.mock_sess, self.mock_logger)
 
 
 class TestInit(DeliveryTester):
-
     def test_init(self):
         assert self.cls.logger == self.mock_logger
         assert self.cls.config == self.config
@@ -40,555 +34,372 @@ def 
test_init(self): class TestGetSplunkPayloads(DeliveryTester): - @patch( - '%s.get_splunk_events' % pb, - return_value=[ - {'account': 'A', 'resource': 1}, - {'resource': 2} - ] - ) - @patch( - '%s._splunk_indices_for_message' % pb, - return_value=['indexA', 'indexB'] + "%s.get_splunk_events" % pb, return_value=[{"account": "A", "resource": 1}, {"resource": 2}] ) + @patch("%s._splunk_indices_for_message" % pb, return_value=["indexA", "indexB"]) def test_payloads(self, mock_gse, mock_sifm): - msg = {'some': 'message'} + msg = {"some": "message"} ts = 1557493290000 result = self.cls.get_splunk_payloads(msg, ts) assert result == [ { - 'time': ts, - 'host': 'cloud-custodian', - 'source': 'A-cloud-custodian', - 'sourcetype': '_json', - 'index': 'indexA', - 'event': {'account': 'A', 'resource': 1} + "time": ts, + "host": "cloud-custodian", + "source": "A-cloud-custodian", + "sourcetype": "_json", + "index": "indexA", + "event": {"account": "A", "resource": 1}, }, { - 'time': ts, - 'host': 'cloud-custodian', - 'source': 'A-cloud-custodian', - 'sourcetype': '_json', - 'index': 'indexB', - 'event': {'account': 'A', 'resource': 1} + "time": ts, + "host": "cloud-custodian", + "source": "A-cloud-custodian", + "sourcetype": "_json", + "index": "indexB", + "event": {"account": "A", "resource": 1}, }, { - 'time': ts, - 'host': 'cloud-custodian', - 'source': 'unknown-cloud-custodian', - 'sourcetype': '_json', - 'index': 'indexA', - 'event': {'resource': 2} + "time": ts, + "host": "cloud-custodian", + "source": "unknown-cloud-custodian", + "sourcetype": "_json", + "index": "indexA", + "event": {"resource": 2}, }, { - 'time': ts, - 'host': 'cloud-custodian', - 'source': 'unknown-cloud-custodian', - 'sourcetype': '_json', - 'index': 'indexB', - 'event': {'resource': 2} - } + "time": ts, + "host": "cloud-custodian", + "source": "unknown-cloud-custodian", + "sourcetype": "_json", + "index": "indexB", + "event": {"resource": 2}, + }, ] assert mock_gse.mock_calls == [call(msg)] assert mock_sifm.mock_calls == [call(msg)] @patch( - '%s.get_splunk_events' % pb, - return_value=[ - {'account': 'A', 'resource': 1}, - {'resource': 2} - ] - ) - @patch( - '%s._splunk_indices_for_message' % pb, - return_value=['indexA', 'indexB'] + "%s.get_splunk_events" % pb, return_value=[{"account": "A", "resource": 1}, {"resource": 2}] ) + @patch("%s._splunk_indices_for_message" % pb, return_value=["indexA", "indexB"]) def test_sourcetype(self, mock_gse, mock_sifm): - self.config['splunk_hec_sourcetype'] = 'custom-sourcetype' - msg = {'some': 'message'} + self.config["splunk_hec_sourcetype"] = "custom-sourcetype" + msg = {"some": "message"} ts = 1557493290000 result = self.cls.get_splunk_payloads(msg, ts) assert result == [ { - 'time': ts, - 'host': 'cloud-custodian', - 'source': 'A-cloud-custodian', - 'sourcetype': 'custom-sourcetype', - 'index': 'indexA', - 'event': {'account': 'A', 'resource': 1} + "time": ts, + "host": "cloud-custodian", + "source": "A-cloud-custodian", + "sourcetype": "custom-sourcetype", + "index": "indexA", + "event": {"account": "A", "resource": 1}, }, { - 'time': ts, - 'host': 'cloud-custodian', - 'source': 'A-cloud-custodian', - 'sourcetype': 'custom-sourcetype', - 'index': 'indexB', - 'event': {'account': 'A', 'resource': 1} + "time": ts, + "host": "cloud-custodian", + "source": "A-cloud-custodian", + "sourcetype": "custom-sourcetype", + "index": "indexB", + "event": {"account": "A", "resource": 1}, }, { - 'time': ts, - 'host': 'cloud-custodian', - 'source': 'unknown-cloud-custodian', - 'sourcetype': 
'custom-sourcetype', - 'index': 'indexA', - 'event': {'resource': 2} + "time": ts, + "host": "cloud-custodian", + "source": "unknown-cloud-custodian", + "sourcetype": "custom-sourcetype", + "index": "indexA", + "event": {"resource": 2}, }, { - 'time': ts, - 'host': 'cloud-custodian', - 'source': 'unknown-cloud-custodian', - 'sourcetype': 'custom-sourcetype', - 'index': 'indexB', - 'event': {'resource': 2} - } + "time": ts, + "host": "cloud-custodian", + "source": "unknown-cloud-custodian", + "sourcetype": "custom-sourcetype", + "index": "indexB", + "event": {"resource": 2}, + }, ] assert mock_gse.mock_calls == [call(msg)] assert mock_sifm.mock_calls == [call(msg)] class TestGetSplunkEvents(DeliveryTester): - - @patch( - '%s.get_aws_username_from_event' % pbm, - return_value='uname' - ) - @patch( - '%s._prune_log_message' % pb, return_value={'event': 'cleaned'} - ) + @patch("%s.get_aws_username_from_event" % pbm, return_value="uname") + @patch("%s._prune_log_message" % pb, return_value={"event": "cleaned"}) def test_simple(self, mock_prune, mock_getuser): - def se_tags(res): - if res['InstanceId'] == 'i-123': - return {'tag1': 'val1'} + if res["InstanceId"] == "i-123": + return {"tag1": "val1"} return {} msg = { - 'account': 'aname', - 'account_id': 'aid', - 'region': 'rname', - 'event': { - 'foo': '1', - 'source': 'esrc', - 'detail-type': 'etype' - }, - 'policy': { - 'resource': 'ec2', - 'name': 'pname', - 'actions': [ - 'foo', - {'type': 'bar'}, - {'type': 'notify'}, - 'baz' - ] + "account": "aname", + "account_id": "aid", + "region": "rname", + "event": {"foo": "1", "source": "esrc", "detail-type": "etype"}, + "policy": { + "resource": "ec2", + "name": "pname", + "actions": ["foo", {"type": "bar"}, {"type": "notify"}, "baz"], }, - 'resources': [ + "resources": [ { - 'InstanceId': 'i-123', - 'c7n:MatchedFilters': [1, 2], - 'Tags': [ - {'Key': 'tag1', 'Value': 'val1'} - ] + "InstanceId": "i-123", + "c7n:MatchedFilters": [1, 2], + "Tags": [{"Key": "tag1", "Value": "val1"}], }, - {'InstanceId': 'i-456'}, - {'InstanceId': 'i-789', 'c7n.metrics': {'foo': 'bar'}} - ] + {"InstanceId": "i-456"}, + {"InstanceId": "i-789", "c7n.metrics": {"foo": "bar"}}, + ], } - with patch('%s.tags_for_resource' % pb) as mock_tags: + with patch("%s.tags_for_resource" % pb) as mock_tags: mock_tags.side_effect = se_tags res = self.cls.get_splunk_events(msg) - assert res == [ - {'event': 'cleaned'}, - {'event': 'cleaned'}, - {'event': 'cleaned'} - ] + assert res == [{"event": "cleaned"}, {"event": "cleaned"}, {"event": "cleaned"}] assert mock_tags.mock_calls == [ - call(msg['resources'][0]), - call(msg['resources'][1]), - call(msg['resources'][2]) - ] - assert mock_getuser.mock_calls == [ - call(self.cls.logger, msg['event']) + call(msg["resources"][0]), + call(msg["resources"][1]), + call(msg["resources"][2]), ] + assert mock_getuser.mock_calls == [call(self.cls.logger, msg["event"])] assert mock_prune.mock_calls == [ - call({ - 'account': 'aname', - 'account_id': 'aid', - 'region': 'rname', - 'event': { - 'foo': '1', - 'source': 'esrc', - 'detail-type': 'etype' - }, - 'policy': { - 'resource': 'ec2', - 'name': 'pname', - 'actions': [ - 'foo', - {'type': 'bar'}, - {'type': 'notify'}, - 'baz' - ] - }, - 'resource': { - 'InstanceId': 'i-123', - 'c7n:MatchedFilters': [1, 2], - 'tags': {'tag1': 'val1'} - }, - 'event_triggering_user': 'uname' - }), - call({ - 'account': 'aname', - 'account_id': 'aid', - 'region': 'rname', - 'event': { - 'foo': '1', - 'source': 'esrc', - 'detail-type': 'etype' - }, - 'policy': { - 
'resource': 'ec2', - 'name': 'pname', - 'actions': [ - 'foo', - {'type': 'bar'}, - {'type': 'notify'}, - 'baz' - ] - }, - 'resource': { - 'InstanceId': 'i-456', - 'tags': {} - }, - 'event_triggering_user': 'uname' - }), - call({ - 'account': 'aname', - 'account_id': 'aid', - 'region': 'rname', - 'event': { - 'foo': '1', - 'source': 'esrc', - 'detail-type': 'etype' - }, - 'policy': { - 'resource': 'ec2', - 'name': 'pname', - 'actions': [ - 'foo', - {'type': 'bar'}, - {'type': 'notify'}, - 'baz' - ] - }, - 'resource': { - 'InstanceId': 'i-789', - 'c7n.metrics': {'foo': 'bar'}, - 'tags': {} - }, - 'event_triggering_user': 'uname' - }) + call( + { + "account": "aname", + "account_id": "aid", + "region": "rname", + "event": {"foo": "1", "source": "esrc", "detail-type": "etype"}, + "policy": { + "resource": "ec2", + "name": "pname", + "actions": ["foo", {"type": "bar"}, {"type": "notify"}, "baz"], + }, + "resource": { + "InstanceId": "i-123", + "c7n:MatchedFilters": [1, 2], + "tags": {"tag1": "val1"}, + }, + "event_triggering_user": "uname", + } + ), + call( + { + "account": "aname", + "account_id": "aid", + "region": "rname", + "event": {"foo": "1", "source": "esrc", "detail-type": "etype"}, + "policy": { + "resource": "ec2", + "name": "pname", + "actions": ["foo", {"type": "bar"}, {"type": "notify"}, "baz"], + }, + "resource": {"InstanceId": "i-456", "tags": {}}, + "event_triggering_user": "uname", + } + ), + call( + { + "account": "aname", + "account_id": "aid", + "region": "rname", + "event": {"foo": "1", "source": "esrc", "detail-type": "etype"}, + "policy": { + "resource": "ec2", + "name": "pname", + "actions": ["foo", {"type": "bar"}, {"type": "notify"}, "baz"], + }, + "resource": {"InstanceId": "i-789", "c7n.metrics": {"foo": "bar"}, "tags": {}}, + "event_triggering_user": "uname", + } + ), ] - @patch( - '%s.get_aws_username_from_event' % pbm, - return_value='uname' - ) - @patch( - '%s._prune_log_message' % pb, return_value={'event': 'cleaned'} - ) + @patch("%s.get_aws_username_from_event" % pbm, return_value="uname") + @patch("%s._prune_log_message" % pb, return_value={"event": "cleaned"}) def test_splunk_actions_list(self, mock_prune, mock_getuser): - self.config['splunk_actions_list'] = True + self.config["splunk_actions_list"] = True def se_tags(res): - if res['InstanceId'] == 'i-123': - return {'tag1': 'val1'} + if res["InstanceId"] == "i-123": + return {"tag1": "val1"} return {} msg = { - 'account': 'aname', - 'account_id': 'aid', - 'region': 'rname', - 'event': { - 'foo': '1', - 'source': 'esrc', - 'detail-type': 'etype' + "account": "aname", + "account_id": "aid", + "region": "rname", + "event": {"foo": "1", "source": "esrc", "detail-type": "etype"}, + "policy": { + "resource": "ec2", + "name": "pname", + "actions": ["foo", {"type": "bar"}, {"type": "notify"}, "baz"], }, - 'policy': { - 'resource': 'ec2', - 'name': 'pname', - 'actions': [ - 'foo', - {'type': 'bar'}, - {'type': 'notify'}, - 'baz' - ] - }, - 'resources': [ - {'InstanceId': 'i-123', 'c7n:MatchedFilters': [1, 2]}, - {'InstanceId': 'i-456'}, - {'InstanceId': 'i-789', 'c7n.metrics': {'foo': 'bar'}} - ] + "resources": [ + {"InstanceId": "i-123", "c7n:MatchedFilters": [1, 2]}, + {"InstanceId": "i-456"}, + {"InstanceId": "i-789", "c7n.metrics": {"foo": "bar"}}, + ], } - with patch('%s.tags_for_resource' % pb) as mock_tags: + with patch("%s.tags_for_resource" % pb) as mock_tags: mock_tags.side_effect = se_tags res = self.cls.get_splunk_events(msg) - assert res == [ - {'event': 'cleaned'}, - {'event': 'cleaned'}, - 
{'event': 'cleaned'} - ] + assert res == [{"event": "cleaned"}, {"event": "cleaned"}, {"event": "cleaned"}] assert mock_tags.mock_calls == [ - call(msg['resources'][0]), - call(msg['resources'][1]), - call(msg['resources'][2]) - ] - assert mock_getuser.mock_calls == [ - call(self.cls.logger, msg['event']) + call(msg["resources"][0]), + call(msg["resources"][1]), + call(msg["resources"][2]), ] + assert mock_getuser.mock_calls == [call(self.cls.logger, msg["event"])] assert mock_prune.mock_calls == [ - call({ - 'account': 'aname', - 'account_id': 'aid', - 'region': 'rname', - 'event': { - 'foo': '1', - 'source': 'esrc', - 'detail-type': 'etype' - }, - 'policy': { - 'resource': 'ec2', - 'name': 'pname', - 'actions': [ - 'foo', - {'type': 'bar'}, - {'type': 'notify'}, - 'baz' - ] - }, - 'resource': { - 'InstanceId': 'i-123', - 'c7n:MatchedFilters': [1, 2], - 'tags': {'tag1': 'val1'} - }, - 'event_triggering_user': 'uname', - 'actions': ['foo', 'bar', 'notify', 'baz'] - }), - call({ - 'account': 'aname', - 'account_id': 'aid', - 'region': 'rname', - 'event': { - 'foo': '1', - 'source': 'esrc', - 'detail-type': 'etype' - }, - 'policy': { - 'resource': 'ec2', - 'name': 'pname', - 'actions': [ - 'foo', - {'type': 'bar'}, - {'type': 'notify'}, - 'baz' - ] - }, - 'resource': { - 'InstanceId': 'i-456', - 'tags': {} - }, - 'event_triggering_user': 'uname', - 'actions': ['foo', 'bar', 'notify', 'baz'] - }), - call({ - 'account': 'aname', - 'account_id': 'aid', - 'region': 'rname', - 'event': { - 'foo': '1', - 'source': 'esrc', - 'detail-type': 'etype' - }, - 'policy': { - 'resource': 'ec2', - 'name': 'pname', - 'actions': [ - 'foo', - {'type': 'bar'}, - {'type': 'notify'}, - 'baz' - ] - }, - 'resource': { - 'InstanceId': 'i-789', - 'c7n.metrics': {'foo': 'bar'}, - 'tags': {} - }, - 'event_triggering_user': 'uname', - 'actions': ['foo', 'bar', 'notify', 'baz'] - }) + call( + { + "account": "aname", + "account_id": "aid", + "region": "rname", + "event": {"foo": "1", "source": "esrc", "detail-type": "etype"}, + "policy": { + "resource": "ec2", + "name": "pname", + "actions": ["foo", {"type": "bar"}, {"type": "notify"}, "baz"], + }, + "resource": { + "InstanceId": "i-123", + "c7n:MatchedFilters": [1, 2], + "tags": {"tag1": "val1"}, + }, + "event_triggering_user": "uname", + "actions": ["foo", "bar", "notify", "baz"], + } + ), + call( + { + "account": "aname", + "account_id": "aid", + "region": "rname", + "event": {"foo": "1", "source": "esrc", "detail-type": "etype"}, + "policy": { + "resource": "ec2", + "name": "pname", + "actions": ["foo", {"type": "bar"}, {"type": "notify"}, "baz"], + }, + "resource": {"InstanceId": "i-456", "tags": {}}, + "event_triggering_user": "uname", + "actions": ["foo", "bar", "notify", "baz"], + } + ), + call( + { + "account": "aname", + "account_id": "aid", + "region": "rname", + "event": {"foo": "1", "source": "esrc", "detail-type": "etype"}, + "policy": { + "resource": "ec2", + "name": "pname", + "actions": ["foo", {"type": "bar"}, {"type": "notify"}, "baz"], + }, + "resource": {"InstanceId": "i-789", "c7n.metrics": {"foo": "bar"}, "tags": {}}, + "event_triggering_user": "uname", + "actions": ["foo", "bar", "notify", "baz"], + } + ), ] class TestPruneLogMessage(DeliveryTester): - def test_no_paths(self): - msg = { - 'foo': 'bar', - 'resource': { - 'c7n.metrics': [] - } - } + msg = {"foo": "bar", "resource": {"c7n.metrics": []}} assert self.cls._prune_log_message(msg) == msg def test_no_values(self): msg = { - 'foo': '123', - 'bar': [ - 'A', 'B', 'C' - ], - 'baz': { - 'blam': 
{ - 'one': 1, - 'two': 2, - 'three': 3, - 'four': 4 - }, - 'blarg': { - 'quux': False - } - } + "foo": "123", + "bar": ["A", "B", "C"], + "baz": {"blam": {"one": 1, "two": 2, "three": 3, "four": 4}, "blarg": {"quux": False}}, } - self.config['splunk_remove_paths'] = [ - '/no/value/here', - '/bad', - '/not/a/path' - ] + self.config["splunk_remove_paths"] = ["/no/value/here", "/bad", "/not/a/path"] expected = { - 'foo': '123', - 'bar': [ - 'A', 'B', 'C' - ], - 'baz': { - 'blam': { - 'one': 1, - 'two': 2, - 'three': 3, - 'four': 4 - }, - 'blarg': { - 'quux': False - } - } + "foo": "123", + "bar": ["A", "B", "C"], + "baz": {"blam": {"one": 1, "two": 2, "three": 3, "four": 4}, "blarg": {"quux": False}}, } assert self.cls._prune_log_message(msg) == expected def test_remove_some(self): msg = { - 'foo': '123', - 'bar': [ - 'A', 'B', 'C' - ], - 'baz': { - 'blam': { - 'one': 1, - 'two': 2, - 'three': 3, - 'four': 4 - }, - 'blarg': { - 'quux': False - } - }, - 'resource': { - 'r1': 'r2', - 'c7n.metrics': ['a', 'b'] - } + "foo": "123", + "bar": ["A", "B", "C"], + "baz": {"blam": {"one": 1, "two": 2, "three": 3, "four": 4}, "blarg": {"quux": False}}, + "resource": {"r1": "r2", "c7n.metrics": ["a", "b"]}, } - self.config['splunk_remove_paths'] = [ - '/bar/1', - '/baz/blarg', - '/baz/blam/one', - '/baz/blam/two', - '/not/a/path', - '/resource/c7n.metrics' + self.config["splunk_remove_paths"] = [ + "/bar/1", + "/baz/blarg", + "/baz/blam/one", + "/baz/blam/two", + "/not/a/path", + "/resource/c7n.metrics", ] expected = { - 'foo': '123', - 'bar': [ - 'A', 'C' - ], - 'baz': { - 'blam': { - 'three': 3, - 'four': 4 - } - }, - 'resource': { - 'r1': 'r2' - } + "foo": "123", + "bar": ["A", "C"], + "baz": {"blam": {"three": 3, "four": 4}}, + "resource": {"r1": "r2"}, } assert self.cls._prune_log_message(msg) == expected class TestDeliverSplunkMessages(DeliveryTester): - def test_handle_success(self): - msg = [ - {'foo': 'bar'}, - {'baz': 'blam'} - ] - with patch('%s._try_send' % pb, autospec=True) as mock_send: + msg = [{"foo": "bar"}, {"baz": "blam"}] + with patch("%s._try_send" % pb, autospec=True) as mock_send: mock_send.return_value = True self.cls.deliver_splunk_messages(msg) assert mock_send.mock_calls == [ - call(self.cls, {'foo': 'bar'}), - call(self.cls, {'baz': 'blam'}) + call(self.cls, {"foo": "bar"}), + call(self.cls, {"baz": "blam"}), ] def test_handle_failure(self): - msg = [ - {'foo': 'bar'}, - {'baz': 'blam'} - ] - with patch('%s._try_send' % pb, autospec=True) as mock_send: + msg = [{"foo": "bar"}, {"baz": "blam"}] + with patch("%s._try_send" % pb, autospec=True) as mock_send: mock_send.side_effect = [True, False] with pytest.raises(RuntimeError): self.cls.deliver_splunk_messages(msg) assert mock_send.mock_calls == [ - call(self.cls, {'foo': 'bar'}), - call(self.cls, {'baz': 'blam'}) + call(self.cls, {"foo": "bar"}), + call(self.cls, {"baz": "blam"}), ] class TestTrySend(DeliveryTester): - def test_success(self): - self.config['splunk_max_attempts'] = 3 - self.config['splunk_hex_max_length'] = None - with patch('%s.sleep' % pbm) as mock_sleep: - with patch('%s.uniform' % pbm) as mock_uniform: - with patch('%s._send_splunk' % pb) as mock_send: + self.config["splunk_max_attempts"] = 3 + self.config["splunk_hex_max_length"] = None + with patch("%s.sleep" % pbm) as mock_sleep: + with patch("%s.uniform" % pbm) as mock_uniform: + with patch("%s._send_splunk" % pb) as mock_send: mock_uniform.return_value = 1.2 - res = self.cls._try_send({'foo': 'bar'}) + res = self.cls._try_send({"foo": "bar"}) assert res 
is True assert mock_sleep.mock_calls == [] assert mock_uniform.mock_calls == [] - assert mock_send.mock_calls == [ - call('{"foo": "bar"}') - ] + assert mock_send.mock_calls == [call('{"foo": "bar"}')] assert self.mock_logger.mock_calls == [] def test_payload_too_long(self): - self.config['splunk_max_attempts'] = 3 - self.config['splunk_hec_max_length'] = 3000 + self.config["splunk_max_attempts"] = 3 + self.config["splunk_hec_max_length"] = 3000 p = {} for i in range(1, 2000): - p['%d' % i] = i + p["%d" % i] = i j = json.dumps(p) - with patch('%s.sleep' % pbm) as mock_sleep: - with patch('%s.uniform' % pbm) as mock_uniform: - with patch('%s._send_splunk' % pb) as mock_send: + with patch("%s.sleep" % pbm) as mock_sleep: + with patch("%s.uniform" % pbm) as mock_uniform: + with patch("%s._send_splunk" % pb) as mock_send: mock_uniform.return_value = 1.2 self.cls._try_send(p) assert mock_sleep.mock_calls == [] @@ -596,297 +407,257 @@ def test_payload_too_long(self): assert mock_send.mock_calls == [call(j)] assert self.mock_logger.mock_calls == [ call.error( - 'ERROR: Sending %d characters to Splunk HEC; line length ' - 'limit is %d characters. Data will be truncated: %s', - 25772, 3000, j + "ERROR: Sending %d characters to Splunk HEC; line length " + "limit is %d characters. Data will be truncated: %s", + 25772, + 3000, + j, ) ] def test_fail_once(self): - self.config['splunk_max_attempts'] = 3 - self.config['splunk_hex_max_length'] = None - with patch('%s.sleep' % pbm) as mock_sleep: - with patch('%s.uniform' % pbm) as mock_uniform: - with patch('%s._send_splunk' % pb) as mock_send: + self.config["splunk_max_attempts"] = 3 + self.config["splunk_hex_max_length"] = None + with patch("%s.sleep" % pbm) as mock_sleep: + with patch("%s.uniform" % pbm) as mock_uniform: + with patch("%s._send_splunk" % pb) as mock_send: mock_uniform.return_value = 1.2 mock_send.side_effect = [ # raise an Exception first time, succeed second - RuntimeError('foo'), - None + RuntimeError("foo"), + None, ] - res = self.cls._try_send({'foo': 'bar'}) + res = self.cls._try_send({"foo": "bar"}) assert res is True assert mock_sleep.mock_calls == [call(1.2)] assert mock_uniform.mock_calls == [call(1, 4)] - assert mock_send.mock_calls == [ - call('{"foo": "bar"}'), - call('{"foo": "bar"}') - ] + assert mock_send.mock_calls == [call('{"foo": "bar"}'), call('{"foo": "bar"}')] assert self.mock_logger.mock_calls == [ - call.warning( - 'Caught exception sending to Splunk; retry in %s seconds', 1.2 - ) + call.warning("Caught exception sending to Splunk; retry in %s seconds", 1.2) ] def test_fail_always(self): - self.config['splunk_max_attempts'] = 3 - self.config['splunk_hex_max_length'] = None - with patch('%s.sleep' % pbm) as mock_sleep: - with patch('%s.uniform' % pbm) as mock_uniform: - with patch('%s._send_splunk' % pb) as mock_send: + self.config["splunk_max_attempts"] = 3 + self.config["splunk_hex_max_length"] = None + with patch("%s.sleep" % pbm) as mock_sleep: + with patch("%s.uniform" % pbm) as mock_uniform: + with patch("%s._send_splunk" % pb) as mock_send: mock_uniform.return_value = 1.2 - mock_send.side_effect = RuntimeError('foo') - res = self.cls._try_send({'foo': 'bar'}) + mock_send.side_effect = RuntimeError("foo") + res = self.cls._try_send({"foo": "bar"}) assert res is False - assert mock_sleep.mock_calls == [ - call(1.2), - call(1.2), - call(1.2) - ] - assert mock_uniform.mock_calls == [ - call(1, 4), - call(1, 4), - call(1, 4) - ] + assert mock_sleep.mock_calls == [call(1.2), call(1.2), call(1.2)] + assert 
mock_uniform.mock_calls == [call(1, 4), call(1, 4), call(1, 4)] assert mock_send.mock_calls == [ call('{"foo": "bar"}'), call('{"foo": "bar"}'), - call('{"foo": "bar"}') + call('{"foo": "bar"}'), ] assert self.mock_logger.mock_calls == [ - call.warning( - 'Caught exception sending to Splunk; retry in %s seconds', 1.2 - ), - call.warning( - 'Caught exception sending to Splunk; retry in %s seconds', 1.2 - ), - call.warning( - 'Caught exception sending to Splunk; retry in %s seconds', 1.2 - ), - call.error( - 'ERROR - Could not POST to Splunk after %d tries.', 3 - ) + call.warning("Caught exception sending to Splunk; retry in %s seconds", 1.2), + call.warning("Caught exception sending to Splunk; retry in %s seconds", 1.2), + call.warning("Caught exception sending to Splunk; retry in %s seconds", 1.2), + call.error("ERROR - Could not POST to Splunk after %d tries.", 3), ] class TestSendSplunk(DeliveryTester): - def test_send(self): - self.config['splunk_hec_url'] = 'https://splunk.url/foo' - self.config['splunk_hec_token'] = 'stoken' + self.config["splunk_hec_url"] = "https://splunk.url/foo" + self.config["splunk_hec_token"] = "stoken" m_resp = Mock(spec_set=requests.models.Response) type(m_resp).status_code = 200 type(m_resp).text = '{"text": "Success"}' - type(m_resp).headers = {'H1': 'V1'} - m_resp.json.return_value = {'text': 'Success'} - with patch('%s.requests' % pbm, autospec=True) as mock_req: + type(m_resp).headers = {"H1": "V1"} + m_resp.json.return_value = {"text": "Success"} + with patch("%s.requests" % pbm, autospec=True) as mock_req: mock_req.post.return_value = m_resp self.cls._send_splunk('{"foo": "bar"}') assert mock_req.mock_calls == [ call.post( - 'https://splunk.url/foo', - headers={'Authorization': 'Splunk stoken'}, - data='{"foo": "bar"}' + "https://splunk.url/foo", + headers={"Authorization": "Splunk stoken"}, + data='{"foo": "bar"}', ), - call.post().json() + call.post().json(), ] assert self.mock_logger.mock_calls == [ + call.debug("Send to Splunk (%s): %s", "https://splunk.url/foo", '{"foo": "bar"}'), call.debug( - 'Send to Splunk (%s): %s', 'https://splunk.url/foo', - '{"foo": "bar"}' + "Splunk POST got response code %s HEADERS=%s BODY: %s", + 200, + {"H1": "V1"}, + '{"text": "Success"}', ), - call.debug( - 'Splunk POST got response code %s HEADERS=%s BODY: %s', - 200, {'H1': 'V1'}, '{"text": "Success"}' - ) ] def test_send_exception(self): - self.config['splunk_hec_url'] = 'https://splunk.url/foo' - self.config['splunk_hec_token'] = 'stoken' + self.config["splunk_hec_url"] = "https://splunk.url/foo" + self.config["splunk_hec_token"] = "stoken" def se_post(*args, **kwargs): - raise Exception('foo') + raise Exception("foo") - with patch('%s.requests' % pbm, autospec=True) as mock_req: + with patch("%s.requests" % pbm, autospec=True) as mock_req: mock_req.post.side_effect = se_post with pytest.raises(Exception): self.cls._send_splunk('{"foo": "bar"}') assert mock_req.mock_calls == [ call.post( - 'https://splunk.url/foo', - headers={'Authorization': 'Splunk stoken'}, - data='{"foo": "bar"}' + "https://splunk.url/foo", + headers={"Authorization": "Splunk stoken"}, + data='{"foo": "bar"}', ) ] assert self.mock_logger.mock_calls == [ - call.debug( - 'Send to Splunk (%s): %s', 'https://splunk.url/foo', - '{"foo": "bar"}' - ), + call.debug("Send to Splunk (%s): %s", "https://splunk.url/foo", '{"foo": "bar"}'), call.error( - 'Exception during Splunk POST to %s of %s', - 'https://splunk.url/foo', '{"foo": "bar"}', exc_info=True - ) + "Exception during Splunk POST to %s of %s", 
+ "https://splunk.url/foo", + '{"foo": "bar"}', + exc_info=True, + ), ] def test_send_bad_status(self): - self.config['splunk_hec_url'] = 'https://splunk.url/foo' - self.config['splunk_hec_token'] = 'stoken' + self.config["splunk_hec_url"] = "https://splunk.url/foo" + self.config["splunk_hec_token"] = "stoken" m_resp = Mock(spec_set=requests.models.Response) type(m_resp).status_code = 403 type(m_resp).text = '{"text": "Success"}' - type(m_resp).headers = {'H1': 'V1'} - m_resp.json.return_value = {'text': 'Success'} - with patch('%s.requests' % pbm, autospec=True) as mock_req: + type(m_resp).headers = {"H1": "V1"} + m_resp.json.return_value = {"text": "Success"} + with patch("%s.requests" % pbm, autospec=True) as mock_req: mock_req.post.return_value = m_resp with pytest.raises(RuntimeError): self.cls._send_splunk('{"foo": "bar"}') assert mock_req.mock_calls == [ call.post( - 'https://splunk.url/foo', - headers={'Authorization': 'Splunk stoken'}, - data='{"foo": "bar"}' + "https://splunk.url/foo", + headers={"Authorization": "Splunk stoken"}, + data='{"foo": "bar"}', ) ] assert self.mock_logger.mock_calls == [ + call.debug("Send to Splunk (%s): %s", "https://splunk.url/foo", '{"foo": "bar"}'), call.debug( - 'Send to Splunk (%s): %s', 'https://splunk.url/foo', - '{"foo": "bar"}' - ), - call.debug( - 'Splunk POST got response code %s HEADERS=%s BODY: %s', - 403, {'H1': 'V1'}, '{"text": "Success"}' + "Splunk POST got response code %s HEADERS=%s BODY: %s", + 403, + {"H1": "V1"}, + '{"text": "Success"}', ), call.error( - 'Splunk POST returned non-20x response: %s HEADERS=%s BODY: %s', - 403, {'H1': 'V1'}, '{"text": "Success"}' - ) + "Splunk POST returned non-20x response: %s HEADERS=%s BODY: %s", + 403, + {"H1": "V1"}, + '{"text": "Success"}', + ), ] def test_send_non_success(self): - self.config['splunk_hec_url'] = 'https://splunk.url/foo' - self.config['splunk_hec_token'] = 'stoken' + self.config["splunk_hec_url"] = "https://splunk.url/foo" + self.config["splunk_hec_token"] = "stoken" m_resp = Mock(spec_set=requests.models.Response) type(m_resp).status_code = 200 type(m_resp).text = '{"text": "Failure"}' - type(m_resp).headers = {'H1': 'V1'} - m_resp.json.return_value = {'text': 'Failure'} - with patch('%s.requests' % pbm, autospec=True) as mock_req: + type(m_resp).headers = {"H1": "V1"} + m_resp.json.return_value = {"text": "Failure"} + with patch("%s.requests" % pbm, autospec=True) as mock_req: mock_req.post.return_value = m_resp with pytest.raises(RuntimeError): self.cls._send_splunk('{"foo": "bar"}') assert mock_req.mock_calls == [ call.post( - 'https://splunk.url/foo', - headers={'Authorization': 'Splunk stoken'}, - data='{"foo": "bar"}' + "https://splunk.url/foo", + headers={"Authorization": "Splunk stoken"}, + data='{"foo": "bar"}', ), - call.post().json() + call.post().json(), ] assert self.mock_logger.mock_calls == [ + call.debug("Send to Splunk (%s): %s", "https://splunk.url/foo", '{"foo": "bar"}'), call.debug( - 'Send to Splunk (%s): %s', 'https://splunk.url/foo', - '{"foo": "bar"}' - ), - call.debug( - 'Splunk POST got response code %s HEADERS=%s BODY: %s', - 200, {'H1': 'V1'}, '{"text": "Failure"}' + "Splunk POST got response code %s HEADERS=%s BODY: %s", + 200, + {"H1": "V1"}, + '{"text": "Failure"}', ), - call.error( - 'Splunk POST returned non-success response: %s', - {'text': 'Failure'} - ) + call.error("Splunk POST returned non-success response: %s", {"text": "Failure"}), ] def test_send_non_success_no_json(self): - self.config['splunk_hec_url'] = 'https://splunk.url/foo' - 
self.config['splunk_hec_token'] = 'stoken' + self.config["splunk_hec_url"] = "https://splunk.url/foo" + self.config["splunk_hec_token"] = "stoken" def se_exc(*args, **kwargs): - raise Exception('foo') + raise Exception("foo") m_resp = Mock(spec_set=requests.models.Response) type(m_resp).status_code = 200 type(m_resp).text = '{"text": "Failure"}' - type(m_resp).headers = {'H1': 'V1'} + type(m_resp).headers = {"H1": "V1"} m_resp.json.side_effect = se_exc - with patch('%s.requests' % pbm, autospec=True) as mock_req: + with patch("%s.requests" % pbm, autospec=True) as mock_req: mock_req.post.return_value = m_resp with pytest.raises(RuntimeError): self.cls._send_splunk('{"foo": "bar"}') assert mock_req.mock_calls == [ call.post( - 'https://splunk.url/foo', - headers={'Authorization': 'Splunk stoken'}, - data='{"foo": "bar"}' + "https://splunk.url/foo", + headers={"Authorization": "Splunk stoken"}, + data='{"foo": "bar"}', ), - call.post().json() + call.post().json(), ] assert self.mock_logger.mock_calls == [ + call.debug("Send to Splunk (%s): %s", "https://splunk.url/foo", '{"foo": "bar"}'), call.debug( - 'Send to Splunk (%s): %s', 'https://splunk.url/foo', - '{"foo": "bar"}' - ), - call.debug( - 'Splunk POST got response code %s HEADERS=%s BODY: %s', - 200, {'H1': 'V1'}, '{"text": "Failure"}' + "Splunk POST got response code %s HEADERS=%s BODY: %s", + 200, + {"H1": "V1"}, + '{"text": "Failure"}', ), call.error( - 'Splunk POST returned non-success response: %s', - {'text': '{"text": "Failure"}'} - ) + "Splunk POST returned non-success response: %s", {"text": '{"text": "Failure"}'} + ), ] class TestTagsForResource(DeliveryTester): - def test_empty_resource(self): assert self.cls.tags_for_resource({}) == {} def test_tags_none(self): - assert self.cls.tags_for_resource({'Tags': None}) == {} + assert self.cls.tags_for_resource({"Tags": None}) == {} def test_tags_list(self): - assert self.cls.tags_for_resource({ - 'Tags': [ - { - 'Key': 'foo', - 'Value': 'bar' - }, - { - 'Key': 'one', - 'Value': 'two' - } - ] - }) == {'foo': 'bar', 'one': 'two'} + assert self.cls.tags_for_resource( + {"Tags": [{"Key": "foo", "Value": "bar"}, {"Key": "one", "Value": "two"}]} + ) == {"foo": "bar", "one": "two"} class TestSplunkIndicesForMessage(DeliveryTester): - def test_no_message(self): assert self.cls._splunk_indices_for_message(None) == [] def test_no_action(self): - assert self.cls._splunk_indices_for_message({'foo': 'bar'}) == [] + assert self.cls._splunk_indices_for_message({"foo": "bar"}) == [] def test_action_no_to(self): - assert self.cls._splunk_indices_for_message( - {'action': {'foo': 'bar'}} - ) == [] + assert self.cls._splunk_indices_for_message({"action": {"foo": "bar"}}) == [] def test_simple(self): msg = { - 'action': { - 'to': { - 'foo', - 'splunkhec://bar', - 'baz@example.com', - 'splunkhec://blam', - 'slack://quux' + "action": { + "to": { + "foo", + "splunkhec://bar", + "baz@example.com", + "splunkhec://blam", + "slack://quux", } } } - expected = ['bar', 'blam'] + expected = ["bar", "blam"] res = self.cls._splunk_indices_for_message(msg) assert res == expected diff --git a/tools/c7n_mailer/tests/test_utils.py b/tools/c7n_mailer/tests/test_utils.py index 1a42eceec4a..2073ca77b2b 100644 --- a/tools/c7n_mailer/tests/test_utils.py +++ b/tools/c7n_mailer/tests/test_utils.py @@ -15,22 +15,21 @@ class FormatStruct(unittest.TestCase): - def test_formats_struct(self): expected = '{\n "foo": "bar"\n}' - actual = utils.format_struct({'foo': 'bar'}) + actual = utils.format_struct({"foo": "bar"}) 
self.assertEqual(expected, actual) class StripPrefix(unittest.TestCase): - def test_strip_prefix(self): - self.assertEqual(utils.strip_prefix('aws.internet-gateway', 'aws.'), 'internet-gateway') - self.assertEqual(utils.strip_prefix('aws.s3', 'aws.'), 's3') - self.assertEqual(utils.strip_prefix('aws.webserver', 'aws.'), 'webserver') - self.assertEqual(utils.strip_prefix('nothing', 'aws.'), 'nothing') - self.assertEqual(utils.strip_prefix('azure.azserver', 'azure.'), 'azserver') - self.assertEqual(utils.strip_prefix('', 'aws.'), '') + self.assertEqual(utils.strip_prefix("aws.internet-gateway", "aws."), "internet-gateway") + self.assertEqual(utils.strip_prefix("aws.s3", "aws."), "s3") + self.assertEqual(utils.strip_prefix("aws.webserver", "aws."), "webserver") + self.assertEqual(utils.strip_prefix("nothing", "aws."), "nothing") + self.assertEqual(utils.strip_prefix("azure.azserver", "azure."), "azserver") + self.assertEqual(utils.strip_prefix("gcp.instance", "gcp."), "instance") + self.assertEqual(utils.strip_prefix("", "aws."), "") def test_config_defaults(): @@ -40,87 +39,94 @@ def test_config_defaults(): if v is None: config.pop(k) assert config == dict( - region='us-east-1', - ses_region='us-east-1', + region="us-east-1", + ses_region="us-east-1", memory=1024, timeout=300, - runtime='python3.7', - contact_tags=[]) + runtime="python3.7", + contact_tags=[], + ) class GetResourceTagTargets(unittest.TestCase): - def test_target_tag_list(self): self.assertEqual( utils.get_resource_tag_targets( - {'Tags': [{'Key': 'Creator', 'Value': 'alice'}]}, - ['Creator']), - ['alice']) + {"Tags": [{"Key": "Creator", "Value": "alice"}]}, ["Creator"] + ), + ["alice"], + ) def test_target_tag_map(self): - r = {'Tags': {'Creator': 'Bob'}} - self.assertEqual( - utils.get_resource_tag_targets(r, ['Creator']), - ['Bob']) + r = {"Tags": {"Creator": "Bob"}} + self.assertEqual(utils.get_resource_tag_targets(r, ["Creator"]), ["Bob"]) class ResourceFormat(unittest.TestCase): - def test_efs(self): self.assertEqual( utils.resource_format( - {'Name': 'abc', 'FileSystemId': 'fsid', 'LifeCycleState': 'available'}, - 'efs'), - 'name: abc id: fsid state: available') + {"Name": "abc", "FileSystemId": "fsid", "LifeCycleState": "available"}, "efs" + ), + "name: abc id: fsid state: available", + ) def test_eip(self): self.assertEqual( utils.resource_format( - {'PublicIp': '8.8.8.8', 'Domain': 'vpc', 'AllocationId': 'eipxyz'}, - 'network-addr'), - 'ip: 8.8.8.8 id: eipxyz scope: vpc') + {"PublicIp": "8.8.8.8", "Domain": "vpc", "AllocationId": "eipxyz"}, "network-addr" + ), + "ip: 8.8.8.8 id: eipxyz scope: vpc", + ) def test_nat(self): self.assertEqual( utils.resource_format( - {'NatGatewayId': 'nat-xyz', 'State': 'available', 'VpcId': 'vpc-123'}, - 'nat-gateway'), - 'id: nat-xyz state: available vpc: vpc-123') + {"NatGatewayId": "nat-xyz", "State": "available", "VpcId": "vpc-123"}, "nat-gateway" + ), + "id: nat-xyz state: available vpc: vpc-123", + ) def test_igw(self): self.assertEqual( utils.resource_format( - {'InternetGatewayId': 'igw-x', 'Attachments': []}, - 'aws.internet-gateway'), - 'id: igw-x attachments: 0') + {"InternetGatewayId": "igw-x", "Attachments": []}, "aws.internet-gateway" + ), + "id: igw-x attachments: 0", + ) def test_rds_cluster(self): self.assertEqual( utils.resource_format( - {'DBClusterIdentifier': 'database-2', - 'Engine': 'mysql-aurora', - 'EngineVersion': '5.7.mysql_aurora.2.07.2', - 'AllocatedStorage': '1'}, - 'rds-cluster'), - 'database-2 mysql-aurora-5.7.mysql_aurora.2.07.2 1', + { + 
"DBClusterIdentifier": "database-2", + "Engine": "mysql-aurora", + "EngineVersion": "5.7.mysql_aurora.2.07.2", + "AllocatedStorage": "1", + }, + "rds-cluster", + ), + "database-2 mysql-aurora-5.7.mysql_aurora.2.07.2 1", ) def test_s3(self): - self.assertEqual( - utils.resource_format( - {'Name': 'bucket-x'}, 'aws.s3'), - 'bucket-x') + self.assertEqual(utils.resource_format({"Name": "bucket-x"}, "aws.s3"), "bucket-x") def test_alb(self): self.assertEqual( utils.resource_format( - {'LoadBalancerArn': 'arn:aws:elasticloadbalancing:us-east-1:367930536793' - ':loadbalancer/app/dev/1234567890', - 'AvailabilityZones': [], 'Scheme': 'internal'}, - 'app-elb'), - 'arn: arn:aws:elasticloadbalancing:us-east-1:367930536793:' - 'loadbalancer/app/dev/1234567890' - ' zones: 0 scheme: internal') + { + "LoadBalancerArn": "arn:aws:elasticloadbalancing:us-east-1:367930536793" + ":loadbalancer/app/dev/1234567890", + "AvailabilityZones": [], + "Scheme": "internal", + }, + "app-elb", + ), + "arn: arn:aws:elasticloadbalancing:us-east-1:367930536793:" + "loadbalancer/app/dev/1234567890" + " zones: 0 scheme: internal", + ) def test_cloudtrail(self): self.assertEqual( @@ -154,7 +160,7 @@ def test_service_quota(self): "service-quota", ), "ServiceName: Amazon EC2 Auto Scaling QuotaName: Auto Scaling groups per region " - "Quota: 200 Usage: 54\n" + "Quota: 200 Usage: 54\n", ) def test_service_quota_none_usagemetric(self): @@ -163,13 +169,11 @@ def test_service_quota_none_usagemetric(self): { "ServiceName": "AWS Cloud Map", "QuotaName": "Namespaces per Region", - "c7n:MatchedFilters": [ - "UsageMetric" - ] + "c7n:MatchedFilters": ["UsageMetric"], }, "service-quota", ), - "ServiceName: AWS Cloud Map QuotaName: Namespaces per Region\n" + "ServiceName: AWS Cloud Map QuotaName: Namespaces per Region\n", ) @@ -177,231 +181,154 @@ class GetAwsUsernameFromEvent(unittest.TestCase): # note principalId is very org/domain specific for federated?, it would be # good to get confirmation from capone on this event / test. 
CLOUDTRAIL_EVENT = { - 'detail': { - 'userIdentity': { + "detail": { + "userIdentity": { "type": "IAMUser", "principalId": "AIDAJ45Q7YFFAREXAMPLE", "arn": "arn:aws:iam::123456789012:user/michael_bolton", "accountId": "123456789012", "accessKeyId": "AKIAIOSFODNN7EXAMPLE", - "userName": "michael_bolton" + "userName": "michael_bolton", } } } def test_get(self): - username = utils.get_aws_username_from_event( - Mock(), self.CLOUDTRAIL_EVENT - ) - self.assertEqual(username, 'michael_bolton') + username = utils.get_aws_username_from_event(Mock(), self.CLOUDTRAIL_EVENT) + self.assertEqual(username, "michael_bolton") def test_get_username_none(self): - self.assertEqual( - utils.get_aws_username_from_event(Mock(), None), - None - ) + self.assertEqual(utils.get_aws_username_from_event(Mock(), None), None) def test_get_username_identity_none(self): - evt = {'detail': {}} - self.assertEqual( - utils.get_aws_username_from_event(Mock(), evt), - None - ) + evt = {"detail": {}} + self.assertEqual(utils.get_aws_username_from_event(Mock(), evt), None) def test_get_username_assumed_role(self): - evt = { - 'detail': { - 'userIdentity': { - 'type': 'AssumedRole', - 'arn': 'foo' - } - } - } - self.assertEqual( - utils.get_aws_username_from_event(Mock(), evt), - 'foo' - ) + evt = {"detail": {"userIdentity": {"type": "AssumedRole", "arn": "foo"}}} + self.assertEqual(utils.get_aws_username_from_event(Mock(), evt), "foo") def test_get_username_assumed_role_instance(self): - evt = { - 'detail': { - 'userIdentity': { - 'type': 'AssumedRole', - 'arn': 'foo/i-12345678' - } - } - } - self.assertEqual( - utils.get_aws_username_from_event(Mock(), evt), - None - ) + evt = {"detail": {"userIdentity": {"type": "AssumedRole", "arn": "foo/i-12345678"}}} + self.assertEqual(utils.get_aws_username_from_event(Mock(), evt), None) def test_get_username_assumed_role_lambda(self): - evt = { - 'detail': { - 'userIdentity': { - 'type': 'AssumedRole', - 'arn': 'foo/awslambda' - } - } - } - self.assertEqual( - utils.get_aws_username_from_event(Mock(), evt), - None - ) + evt = {"detail": {"userIdentity": {"type": "AssumedRole", "arn": "foo/awslambda"}}} + self.assertEqual(utils.get_aws_username_from_event(Mock(), evt), None) def test_get_username_assumed_role_colons(self): - evt = { - 'detail': { - 'userIdentity': { - 'type': 'AssumedRole', - 'arn': 'foo/bar:baz:blam' - } - } - } - self.assertEqual( - utils.get_aws_username_from_event(Mock(), evt), - 'baz:blam' - ) + evt = {"detail": {"userIdentity": {"type": "AssumedRole", "arn": "foo/bar:baz:blam"}}} + self.assertEqual(utils.get_aws_username_from_event(Mock(), evt), "baz:blam") def test_get_username_iam(self): - evt = { - 'detail': { - 'userIdentity': { - 'type': 'IAMUser', - 'userName': 'bar' - } - } - } - self.assertEqual( - utils.get_aws_username_from_event(Mock(), evt), - 'bar' - ) + evt = {"detail": {"userIdentity": {"type": "IAMUser", "userName": "bar"}}} + self.assertEqual(utils.get_aws_username_from_event(Mock(), evt), "bar") def test_get_username_root(self): - evt = { - 'detail': { - 'userIdentity': { - 'type': 'Root' - } - } - } - self.assertEqual( - utils.get_aws_username_from_event(Mock(), evt), - None - ) + evt = {"detail": {"userIdentity": {"type": "Root"}}} + self.assertEqual(utils.get_aws_username_from_event(Mock(), evt), None) def test_get_username_principalColon(self): - evt = { - 'detail': { - 'userIdentity': { - 'type': 'foo', - 'principalId': 'bar:baz' - } - } - } - self.assertEqual( - utils.get_aws_username_from_event(Mock(), evt), - 'baz' - ) + evt = {"detail": 
{"userIdentity": {"type": "foo", "principalId": "bar:baz"}}} + self.assertEqual(utils.get_aws_username_from_event(Mock(), evt), "baz") def test_get_username_principal(self): - evt = { - 'detail': { - 'userIdentity': { - 'type': 'foo', - 'principalId': 'blam' - } - } - } - self.assertEqual( - utils.get_aws_username_from_event(Mock(), evt), - 'blam' - ) + evt = {"detail": {"userIdentity": {"type": "foo", "principalId": "blam"}}} + self.assertEqual(utils.get_aws_username_from_event(Mock(), evt), "blam") class ProviderSelector(unittest.TestCase): - def test_get_providers(self): - self.assertEqual(utils.get_provider({'queue_url': 'asq://'}), utils.Providers.Azure) - self.assertEqual(utils.get_provider({'queue_url': 'sqs://'}), utils.Providers.AWS) + self.assertEqual(utils.get_provider({"queue_url": "asq://"}), utils.Providers.Azure) + self.assertEqual(utils.get_provider({"queue_url": "sqs://"}), utils.Providers.AWS) + self.assertEqual(utils.get_provider({"queue_url": "projects"}), utils.Providers.GCP) class DecryptTests(unittest.TestCase): - - @patch('c7n_mailer.utils.kms_decrypt') + @patch("c7n_mailer.utils.kms_decrypt") def test_kms_decrypt(self, kms_decrypt_mock): - utils.decrypt({'queue_url': 'aws', 'test': 'test'}, Mock(), Mock(), 'test') + utils.decrypt({"queue_url": "aws", "test": "test"}, Mock(), Mock(), "test") kms_decrypt_mock.assert_called_once() - @patch('c7n_mailer.azure_mailer.utils.azure_decrypt') + @patch("c7n_mailer.azure_mailer.utils.azure_decrypt") def test_azure_decrypt(self, azure_decrypt_mock): - utils.decrypt({'queue_url': 'asq://', 'test': 'test'}, Mock(), Mock(), 'test') + utils.decrypt({"queue_url": "asq://", "test": "test"}, Mock(), Mock(), "test") azure_decrypt_mock.assert_called_once() + @patch("c7n_mailer.gcp_mailer.utils.gcp_decrypt") + def test_gcp_decrypt(self, gcp_decrypt_mock): + utils.decrypt({"queue_url": "projects", "test": "test"}, Mock(), Mock(), "test") + gcp_decrypt_mock.assert_called_once() + def test_decrypt_none(self): - self.assertEqual(utils.decrypt({'queue_url': 'aws'}, Mock(), Mock(), 'test'), None) - self.assertEqual(utils.decrypt({'queue_url': 'asq://'}, Mock(), Mock(), 'test'), None) + self.assertEqual(utils.decrypt({"queue_url": "aws"}, Mock(), Mock(), "test"), None) + self.assertEqual(utils.decrypt({"queue_url": "asq://"}, Mock(), Mock(), "test"), None) + self.assertEqual(utils.decrypt({"queue_url": "projects"}, Mock(), Mock(), "test"), None) class OtherTests(unittest.TestCase): - def test_config_defaults(self): config = MAILER_CONFIG utils.setup_defaults(config) self.assertEqual( [ - config.get('region'), - config.get('ses_region'), - config.get('memory'), - config.get('runtime'), - config.get('timeout'), - config.get('subnets'), - config.get('security_groups'), - config.get('contact_tags'), - config.get('ldap_uri'), - config.get('ldap_bind_dn'), - config.get('ldap_bind_user'), - config.get('ldap_bind_password'), - config.get('datadog_api_key'), - config.get('slack_token'), - config.get('slack_webhook'), - config.get('queue_url') + config.get("region"), + config.get("ses_region"), + config.get("memory"), + config.get("runtime"), + config.get("timeout"), + config.get("subnets"), + config.get("security_groups"), + config.get("contact_tags"), + config.get("ldap_uri"), + config.get("ldap_bind_dn"), + config.get("ldap_bind_user"), + config.get("ldap_bind_password"), + config.get("datadog_api_key"), + config.get("slack_token"), + config.get("slack_webhook"), + config.get("queue_url"), ], [ - 'us-east-1', - config.get('region'), + "us-east-1", + 
config.get("region"), 1024, - 'python3.7', + "python3.7", 300, None, None, - MAILER_CONFIG['contact_tags'], - MAILER_CONFIG['ldap_uri'], + MAILER_CONFIG["contact_tags"], + MAILER_CONFIG["ldap_uri"], None, None, None, None, None, None, - MAILER_CONFIG['queue_url'] - ] + MAILER_CONFIG["queue_url"], + ], ) def test_get_jinja_env(self): - env = utils.get_jinja_env(MAILER_CONFIG['templates_folders']) + env = utils.get_jinja_env(MAILER_CONFIG["templates_folders"]) self.assertEqual(env.__class__, jinja2.environment.Environment) def test_get_rendered_jinja(self): # Jinja paths must always be forward slashes regardless of operating system template_abs_filename = os.path.abspath( - os.path.join(os.path.dirname(__file__), 'example.jinja')) - template_abs_filename = template_abs_filename.replace('\\', '/') - SQS_MESSAGE_1['action']['template'] = template_abs_filename + os.path.join(os.path.dirname(__file__), "example.jinja") + ) + template_abs_filename = template_abs_filename.replace("\\", "/") + SQS_MESSAGE_1["action"]["template"] = template_abs_filename body = utils.get_rendered_jinja( - ["test@test.com"], SQS_MESSAGE_1, [RESOURCE_1], - logging.getLogger('c7n_mailer.utils.email'), - 'template', 'default', MAILER_CONFIG['templates_folders']) + ["test@test.com"], + SQS_MESSAGE_1, + [RESOURCE_1], + logging.getLogger("c7n_mailer.utils.email"), + "template", + "default", + MAILER_CONFIG["templates_folders"], + ) self.assertIsNotNone(body) def test_get_date_age(self): @@ -412,14 +339,15 @@ def test_get_date_age(self): def test_get_message_subject(self): subject = utils.get_message_subject(SQS_MESSAGE_1) - self.assertEqual(subject, - SQS_MESSAGE_1['action']['subject'].replace('{{ account }}', - SQS_MESSAGE_1['account'])) + self.assertEqual( + subject, + SQS_MESSAGE_1["action"]["subject"].replace("{{ account }}", SQS_MESSAGE_1["account"]), + ) def test_kms_decrypt(self): - config = {'test': {'secret': 'mysecretpassword'}} + config = {"test": {"secret": "mysecretpassword"}} session_mock = Mock() - session_mock.client().get_secret().value = 'value' + session_mock.client().get_secret().value = "value" session_mock.get_session_for_resource.return_value = session_mock - self.assertEqual(utils.kms_decrypt(config, Mock(), session_mock, 'test'), config['test']) + self.assertEqual(utils.kms_decrypt(config, Mock(), session_mock, "test"), config["test"])