From 98797da16c04cbd4cdc1e717b4e56ab2b1160127 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 May 2023 15:32:47 +0000 Subject: [PATCH 1/4] chore(deps-dev): Bump pyarrow from 11.0.0 to 12.0.0 (#1656) --- poetry.lock | 54 +++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 28 insertions(+), 28 deletions(-) diff --git a/poetry.lock b/poetry.lock index ef83d8494..e3f6c49c8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1486,37 +1486,37 @@ files = [ [[package]] name = "pyarrow" -version = "11.0.0" +version = "12.0.0" description = "Python library for Apache Arrow" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pyarrow-11.0.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:40bb42afa1053c35c749befbe72f6429b7b5f45710e85059cdd534553ebcf4f2"}, - {file = "pyarrow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7c28b5f248e08dea3b3e0c828b91945f431f4202f1a9fe84d1012a761324e1ba"}, - {file = "pyarrow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a37bc81f6c9435da3c9c1e767324ac3064ffbe110c4e460660c43e144be4ed85"}, - {file = "pyarrow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad7c53def8dbbc810282ad308cc46a523ec81e653e60a91c609c2233ae407689"}, - {file = "pyarrow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:25aa11c443b934078bfd60ed63e4e2d42461682b5ac10f67275ea21e60e6042c"}, - {file = "pyarrow-11.0.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:e217d001e6389b20a6759392a5ec49d670757af80101ee6b5f2c8ff0172e02ca"}, - {file = "pyarrow-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad42bb24fc44c48f74f0d8c72a9af16ba9a01a2ccda5739a517aa860fa7e3d56"}, - {file = "pyarrow-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d942c690ff24a08b07cb3df818f542a90e4d359381fbff71b8f2aea5bf58841"}, - {file = "pyarrow-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f010ce497ca1b0f17a8243df3048055c0d18dcadbcc70895d5baf8921f753de5"}, - {file = "pyarrow-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:2f51dc7ca940fdf17893227edb46b6784d37522ce08d21afc56466898cb213b2"}, - {file = "pyarrow-11.0.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:1cbcfcbb0e74b4d94f0b7dde447b835a01bc1d16510edb8bb7d6224b9bf5bafc"}, - {file = "pyarrow-11.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaee8f79d2a120bf3e032d6d64ad20b3af6f56241b0ffc38d201aebfee879d00"}, - {file = "pyarrow-11.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:410624da0708c37e6a27eba321a72f29d277091c8f8d23f72c92bada4092eb5e"}, - {file = "pyarrow-11.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2d53ba72917fdb71e3584ffc23ee4fcc487218f8ff29dd6df3a34c5c48fe8c06"}, - {file = "pyarrow-11.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:f12932e5a6feb5c58192209af1d2607d488cb1d404fbc038ac12ada60327fa34"}, - {file = "pyarrow-11.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:41a1451dd895c0b2964b83d91019e46f15b5564c7ecd5dcb812dadd3f05acc97"}, - {file = "pyarrow-11.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:becc2344be80e5dce4e1b80b7c650d2fc2061b9eb339045035a1baa34d5b8f1c"}, - {file = "pyarrow-11.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f40be0d7381112a398b93c45a7e69f60261e7b0269cc324e9f739ce272f4f70"}, - {file = 
"pyarrow-11.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:362a7c881b32dc6b0eccf83411a97acba2774c10edcec715ccaab5ebf3bb0835"}, - {file = "pyarrow-11.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:ccbf29a0dadfcdd97632b4f7cca20a966bb552853ba254e874c66934931b9841"}, - {file = "pyarrow-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e99be85973592051e46412accea31828da324531a060bd4585046a74ba45854"}, - {file = "pyarrow-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69309be84dcc36422574d19c7d3a30a7ea43804f12552356d1ab2a82a713c418"}, - {file = "pyarrow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da93340fbf6f4e2a62815064383605b7ffa3e9eeb320ec839995b1660d69f89b"}, - {file = "pyarrow-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:caad867121f182d0d3e1a0d36f197df604655d0b466f1bc9bafa903aa95083e4"}, - {file = "pyarrow-11.0.0.tar.gz", hash = "sha256:5461c57dbdb211a632a48facb9b39bbeb8a7905ec95d768078525283caef5f6d"}, + {file = "pyarrow-12.0.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:3b97649c8a9a09e1d8dc76513054f1331bd9ece78ee39365e6bf6bc7503c1e94"}, + {file = "pyarrow-12.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bc4ea634dacb03936f50fcf59574a8e727f90c17c24527e488d8ceb52ae284de"}, + {file = "pyarrow-12.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d568acfca3faa565d663e53ee34173be8e23a95f78f2abfdad198010ec8f745"}, + {file = "pyarrow-12.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b50bb9a82dca38a002d7cbd802a16b1af0f8c50ed2ec94a319f5f2afc047ee9"}, + {file = "pyarrow-12.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:3d1733b1ea086b3c101427d0e57e2be3eb964686e83c2363862a887bb5c41fa8"}, + {file = "pyarrow-12.0.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:a7cd32fe77f967fe08228bc100433273020e58dd6caced12627bcc0a7675a513"}, + {file = "pyarrow-12.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:92fb031e6777847f5c9b01eaa5aa0c9033e853ee80117dce895f116d8b0c3ca3"}, + {file = "pyarrow-12.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:280289ebfd4ac3570f6b776515baa01e4dcbf17122c401e4b7170a27c4be63fd"}, + {file = "pyarrow-12.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:272f147d4f8387bec95f17bb58dcfc7bc7278bb93e01cb7b08a0e93a8921e18e"}, + {file = "pyarrow-12.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:0846ace49998825eda4722f8d7f83fa05601c832549c9087ea49d6d5397d8cec"}, + {file = "pyarrow-12.0.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:993287136369aca60005ee7d64130f9466489c4f7425f5c284315b0a5401ccd9"}, + {file = "pyarrow-12.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a7b6a765ee4f88efd7d8348d9a1f804487d60799d0428b6ddf3344eaef37282"}, + {file = "pyarrow-12.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1c4fce253d5bdc8d62f11cfa3da5b0b34b562c04ce84abb8bd7447e63c2b327"}, + {file = "pyarrow-12.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e6be4d85707fc8e7a221c8ab86a40449ce62559ce25c94321df7c8500245888f"}, + {file = "pyarrow-12.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:ea830d9f66bfb82d30b5794642f83dd0e4a718846462d22328981e9eb149cba8"}, + {file = "pyarrow-12.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7b5b9f60d9ef756db59bec8d90e4576b7df57861e6a3d6a8bf99538f68ca15b3"}, + {file = "pyarrow-12.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b99e559d27db36ad3a33868a475f03e3129430fc065accc839ef4daa12c6dab6"}, + {file = "pyarrow-12.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b0810864a593b89877120972d1f7af1d1c9389876dbed92b962ed81492d3ffc"}, + {file = "pyarrow-12.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:23a77d97f4d101ddfe81b9c2ee03a177f0e590a7e68af15eafa06e8f3cf05976"}, + {file = "pyarrow-12.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:2cc63e746221cddb9001f7281dee95fd658085dd5b717b076950e1ccc607059c"}, + {file = "pyarrow-12.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8c26912607e26c2991826bbaf3cf2b9c8c3e17566598c193b492f058b40d3a4"}, + {file = "pyarrow-12.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d8b90efc290e99a81d06015f3a46601c259ecc81ffb6d8ce288c91bd1b868c9"}, + {file = "pyarrow-12.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2466be046b81863be24db370dffd30a2e7894b4f9823fb60ef0a733c31ac6256"}, + {file = "pyarrow-12.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:0e36425b1c1cbf5447718b3f1751bf86c58f2b3ad299f996cd9b1aa040967656"}, + {file = "pyarrow-12.0.0.tar.gz", hash = "sha256:19c812d303610ab5d664b7b1de4051ae23565f9f94d04cbea9e50569746ae1ee"}, ] [package.dependencies] @@ -2694,4 +2694,4 @@ testing = ["pytest", "pytest-durations"] [metadata] lock-version = "2.0" python-versions = "<3.12,>=3.7.1" -content-hash = "f4f941854ed4b37daa184da10bb901bf9a3e838c5a485ee7e982aee103ff63c6" +content-hash = "f553f16d3aabadbac509b99eceef405fddcdca1c2db96ff4d530ab1738f63048" diff --git a/pyproject.toml b/pyproject.toml index 8031b70e3..f10f34119 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -112,7 +112,7 @@ types-requests = "^2.28.11" types-simplejson = "^3.18.0" types-PyYAML = "^6.0.12" coverage = {extras = ["toml"], version = "^7.2"} -pyarrow = "^11.0.0" +pyarrow = ">=11,<13" pytest-snapshot = "^0.9.0" # Cookiecutter tests From a24956706019db62725b4a704ace9b75f0239a1d Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 2 May 2023 16:09:05 +0000 Subject: [PATCH 2/4] chore: Release v0.26.0 (#1657) Co-authored-by: kgpayne Co-authored-by: Ken Payne --- .github/ISSUE_TEMPLATE/bug.yml | 2 +- CHANGELOG.md | 6 ++++++ .../tap-template/{{cookiecutter.tap_id}}/pyproject.toml | 4 ++-- .../{{cookiecutter.target_id}}/pyproject.toml | 4 ++-- docs/conf.py | 2 +- pyproject.toml | 4 ++-- 6 files changed, 14 insertions(+), 8 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml index cfdd7159b..b3edc5d34 100644 --- a/.github/ISSUE_TEMPLATE/bug.yml +++ b/.github/ISSUE_TEMPLATE/bug.yml @@ -15,7 +15,7 @@ body: attributes: label: Singer SDK Version description: Version of the library you are using - placeholder: "0.25.0" + placeholder: "0.26.0" validations: required: true - type: checkboxes diff --git a/CHANGELOG.md b/CHANGELOG.md index ece05b091..3da880efd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,12 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## v0.26.0 (2023-05-02) + +### ✨ New + +- [#1623](https://github.com/meltano/sdk/issues/1623) Explicitly support URL params in string form + ## v0.25.0 (2023-04-25) ### ✨ New diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml index 8a274f538..be4c2aa70 100644 --- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml +++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml @@ -21,7 +21,7 @@ packages = [ [tool.poetry.dependencies] python = "<3.12,>=3.7.1" -singer-sdk = { version="^0.25.0" } +singer-sdk = { version="^0.26.0" } fs-s3fs = { version = "^1.1.1", optional = true } {%- if cookiecutter.stream_type in ["REST", "GraphQL"] %} requests = "^2.28.2" @@ -38,7 +38,7 @@ black = "^23.1.0" pyupgrade = "^3.3.1" mypy = "^1.0.0" isort = "^5.11.5" -singer-sdk = { version="^0.25.0", extras = ["testing"] } +singer-sdk = { version="^0.26.0", extras = ["testing"] } {%- if cookiecutter.stream_type in ["REST", "GraphQL"] %} types-requests = "^2.28.11.12" {%- endif %} diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml b/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml index c5093b002..f0037615e 100644 --- a/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml +++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml @@ -21,7 +21,7 @@ packages = [ [tool.poetry.dependencies] python = "<3.12,>=3.7.1" -singer-sdk = { version="^0.25.0" } +singer-sdk = { version="^0.26.0" } fs-s3fs = { version = "^1.1.1", optional = true } {%- if cookiecutter.serialization_method != "SQL" %} requests = "^2.28.2" @@ -35,7 +35,7 @@ black = "^23.1.0" pyupgrade = "^3.3.1" mypy = "^1.0.0" isort = "^5.11.5" -singer-sdk = { version="^0.25.0", extras = ["testing"] } +singer-sdk = { version="^0.26.0", extras = ["testing"] } {%- if cookiecutter.serialization_method != "SQL" %} types-requests = "^2.28.11.12" {%- endif %} diff --git a/docs/conf.py b/docs/conf.py index 424244b07..6324862f9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -26,7 +26,7 @@ author = "Meltano Core Team and Contributors" # The full version, including alpha/beta/rc tags -release = "0.25.0" +release = "0.26.0" # -- General configuration --------------------------------------------------- diff --git a/pyproject.toml b/pyproject.toml index f10f34119..142305308 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "singer-sdk" -version = "0.25.0" +version = "0.26.0" description = "A framework for building Singer taps" authors = ["Meltano Team and Contributors"] maintainers = ["Meltano Team and Contributors"] @@ -140,7 +140,7 @@ norecursedirs = "cookiecutter" [tool.commitizen] name = "cz_version_bump" -version = "0.25.0" +version = "0.26.0" tag_format = "v$major.$minor.$patch$prerelease" version_files = [ "docs/conf.py", From 3609320cc89ca670965e0e490ec2939988eafecf Mon Sep 17 00:00:00 2001 From: "Edgar R. 
M" Date: Tue, 2 May 2023 11:09:01 -0600 Subject: [PATCH 3/4] chore: Enable `EM` (flake8-error-message) Ruff checks (#1658) --- .pre-commit-config.yaml | 2 +- pyproject.toml | 1 + .../sample_tap_gitlab/gitlab_rest_streams.py | 19 ++--- singer_sdk/_singerlib/messages.py | 10 +-- singer_sdk/authenticators.py | 19 ++--- singer_sdk/configuration/_dict_config.py | 14 ++-- singer_sdk/connectors/sql.py | 47 +++++++------ singer_sdk/helpers/_catalog.py | 19 ++--- singer_sdk/helpers/_flattening.py | 3 +- singer_sdk/helpers/_state.py | 16 +++-- singer_sdk/helpers/_typing.py | 47 +++++++------ singer_sdk/helpers/_util.py | 3 +- singer_sdk/io_base.py | 8 +-- singer_sdk/mapper.py | 70 ++++++++++--------- singer_sdk/mapper_base.py | 5 +- singer_sdk/pagination.py | 7 +- singer_sdk/plugin_base.py | 15 ++-- singer_sdk/sinks/core.py | 8 +-- singer_sdk/sinks/sql.py | 7 +- singer_sdk/streams/core.py | 51 +++++++------- singer_sdk/streams/graphql.py | 6 +- singer_sdk/streams/sql.py | 10 ++- singer_sdk/tap_base.py | 13 ++-- singer_sdk/target_base.py | 12 ++-- singer_sdk/testing/tap_tests.py | 3 +- singer_sdk/testing/templates.py | 28 +++++--- singer_sdk/typing.py | 11 +-- tests/core/rest/test_backoff.py | 6 +- tests/samples/test_target_sqlite.py | 10 ++- 29 files changed, 257 insertions(+), 213 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f07f3553b..8c28fc5ac 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -35,7 +35,7 @@ repos: )$ - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: v0.0.263 + rev: v0.0.264 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/pyproject.toml b/pyproject.toml index 142305308..131891ff2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -241,6 +241,7 @@ select = [ "C4", # flake8-comprehensions "DTZ", # flake8-datetimezs "T10", # flake8-debugger + "EM", # flake8-error-message "ISC", # flake8-implicit-str-concat "ICN", # flake8-import-conventions "G", # flake8-logging-format diff --git a/samples/sample_tap_gitlab/gitlab_rest_streams.py b/samples/sample_tap_gitlab/gitlab_rest_streams.py index b6687d828..b34dd4887 100644 --- a/samples/sample_tap_gitlab/gitlab_rest_streams.py +++ b/samples/sample_tap_gitlab/gitlab_rest_streams.py @@ -76,18 +76,20 @@ def partitions(self) -> list[dict]: ] if "{group_id}" in self.path: if "group_ids" not in self.config: - raise ValueError( - f"Missing `group_ids` setting which is required for the " - f"'{self.name}' stream.", + msg = ( + "Missing `group_ids` setting which is required for the " + f"'{self.name}' stream." ) + raise ValueError(msg) return [ {"group_id": gid} for gid in t.cast(list, self.config.get("group_ids")) ] - raise ValueError( - "Could not detect partition type for Gitlab stream " - f"'{self.name}' ({self.path}). " - "Expected a URL path containing '{project_id}' or '{group_id}'. ", + msg = ( + f"Could not detect partition type for Gitlab stream '{self.name}' " + f"({self.path}). Expected a URL path containing '{{project_id}}' or " + "'{{group_id}}'." ) + raise ValueError(msg) class ProjectsStream(ProjectBasedStream): @@ -198,5 +200,6 @@ def get_url_params( """Return a dictionary of values to be used in parameterization.""" result = super().get_url_params(context, next_page_token) if not context or "epic_id" not in context: - raise ValueError("Cannot sync epic issues without already known epic IDs.") + msg = "Cannot sync epic issues without already known epic IDs." 
+ raise ValueError(msg) return result diff --git a/singer_sdk/_singerlib/messages.py b/singer_sdk/_singerlib/messages.py index 9b0515ae1..7fc17e57d 100644 --- a/singer_sdk/_singerlib/messages.py +++ b/singer_sdk/_singerlib/messages.py @@ -112,10 +112,11 @@ def __post_init__(self) -> None: """ self.type = SingerMessageType.RECORD if self.time_extracted and not self.time_extracted.tzinfo: - raise ValueError( - "'time_extracted' must be either None " - "or an aware datetime (with a time zone)", + msg = ( + "'time_extracted' must be either None or an aware datetime (with a " + "time zone)" ) + raise ValueError(msg) if self.time_extracted: self.time_extracted = self.time_extracted.astimezone(pytz.utc) @@ -148,7 +149,8 @@ def __post_init__(self) -> None: if isinstance(self.bookmark_properties, (str, bytes)): self.bookmark_properties = [self.bookmark_properties] if self.bookmark_properties and not isinstance(self.bookmark_properties, list): - raise ValueError("bookmark_properties must be a string or list of strings") + msg = "bookmark_properties must be a string or list of strings" + raise ValueError(msg) @dataclass diff --git a/singer_sdk/authenticators.py b/singer_sdk/authenticators.py index b379f6a87..b146783d3 100644 --- a/singer_sdk/authenticators.py +++ b/singer_sdk/authenticators.py @@ -222,7 +222,8 @@ def __init__( auth_credentials = {key: value} if location not in ["header", "params"]: - raise ValueError("`type` must be one of 'header' or 'params'.") + msg = "`type` must be one of 'header' or 'params'." + raise ValueError(msg) if location == "header": if self._auth_headers is None: @@ -403,7 +404,8 @@ def auth_endpoint(self) -> str: ValueError: If the endpoint is not set. """ if not self._auth_endpoint: - raise ValueError("Authorization endpoint not set.") + msg = "Authorization endpoint not set." + raise ValueError(msg) return self._auth_endpoint @property @@ -447,9 +449,8 @@ def oauth_request_body(self) -> dict: Raises: NotImplementedError: If derived class does not override this method. """ - raise NotImplementedError( - "The `oauth_request_body` property was not defined in the subclass.", - ) + msg = "The `oauth_request_body` property was not defined in the subclass." + raise NotImplementedError(msg) @property def client_id(self) -> str | None: @@ -504,9 +505,8 @@ def update_access_token(self) -> None: try: token_response.raise_for_status() except requests.HTTPError as ex: - raise RuntimeError( - f"Failed OAuth login, response was '{token_response.json()}'. {ex}", - ) from ex + msg = f"Failed OAuth login, response was '{token_response.json()}'. {ex}" + raise RuntimeError(msg) from ex self.logger.info("OAuth authorization attempt was successful.") @@ -570,7 +570,8 @@ def oauth_request_payload(self) -> dict: ValueError: If the private key is not set. """ if not self.private_key: - raise ValueError("Missing 'private_key' property for OAuth payload.") + msg = "Missing 'private_key' property for OAuth payload." 
+ raise ValueError(msg) private_key: bytes | t.Any = bytes(self.private_key, "UTF-8") if self.private_key_passphrase: diff --git a/singer_sdk/configuration/_dict_config.py b/singer_sdk/configuration/_dict_config.py index 23c89ca25..106f46d9e 100644 --- a/singer_sdk/configuration/_dict_config.py +++ b/singer_sdk/configuration/_dict_config.py @@ -54,11 +54,12 @@ def parse_environment_config( ) if is_string_array_type(config_schema["properties"][config_key]): if env_var_value[0] == "[" and env_var_value[-1] == "]": - raise ValueError( + msg = ( "A bracketed list was detected in the environment variable " - f"'{env_var_name}'. This syntax is no longer supported. " - "Please remove the brackets and try again.", + f"'{env_var_name}'. This syntax is no longer supported. Please " + "remove the brackets and try again." ) + raise ValueError(msg) result[config_key] = env_var_value.split(",") else: result[config_key] = env_var_value @@ -91,10 +92,11 @@ def merge_config_sources( continue if not Path(config_path).is_file(): - raise FileNotFoundError( - f"Could not locate config file at '{config_path}'." - "Please check that the file exists.", + msg = ( + f"Could not locate config file at '{config_path}'. Please check that " + "the file exists." ) + raise FileNotFoundError(msg) config.update(read_json_file(config_path)) diff --git a/singer_sdk/connectors/sql.py b/singer_sdk/connectors/sql.py index 1af4c6f9b..aecfbb0c1 100644 --- a/singer_sdk/connectors/sql.py +++ b/singer_sdk/connectors/sql.py @@ -171,9 +171,8 @@ def get_sqlalchemy_url(self, config: dict[str, t.Any]) -> str: ConfigValidationError: If no valid sqlalchemy_url can be found. """ if "sqlalchemy_url" not in config: - raise ConfigValidationError( - "Could not find or create 'sqlalchemy_url' for connection.", - ) + msg = "Could not find or create 'sqlalchemy_url' for connection." + raise ConfigValidationError(msg) return t.cast(str, config["sqlalchemy_url"]) @@ -211,9 +210,11 @@ def to_jsonschema_type( if issubclass(sql_type, sqlalchemy.types.TypeEngine): return th.to_jsonschema_type(sql_type) - raise ValueError(f"Unexpected type received: '{sql_type.__name__}'") + msg = f"Unexpected type received: '{sql_type.__name__}'" + raise ValueError(msg) - raise ValueError(f"Unexpected type received: '{type(sql_type).__name__}'") + msg = f"Unexpected type received: '{type(sql_type).__name__}'" + raise ValueError(msg) @staticmethod def to_sql_type(jsonschema_type: dict) -> sqlalchemy.types.TypeEngine: @@ -666,7 +667,8 @@ def create_empty_table( RuntimeError: if a variant schema is passed with no properties defined. """ if as_temp_table: - raise NotImplementedError("Temporary tables are not supported.") + msg = "Temporary tables are not supported." + raise NotImplementedError(msg) _ = partition_keys # Not supported in generic implementation. @@ -677,9 +679,8 @@ def create_empty_table( try: properties: dict = schema["properties"] except KeyError as e: - raise RuntimeError( - f"Schema for '{full_table_name}' does not define properties: {schema}", - ) from e + msg = f"Schema for '{full_table_name}' does not define properties: {schema}" + raise RuntimeError(msg) from e for property_name, property_jsonschema in properties.items(): is_primary_key = property_name in primary_keys columns.append( @@ -710,7 +711,8 @@ def _create_empty_column( NotImplementedError: if adding columns is not supported. """ if not self.allow_column_add: - raise NotImplementedError("Adding columns is not supported.") + msg = "Adding columns is not supported."
+ raise NotImplementedError(msg) column_add_ddl = self.get_column_add_ddl( table_name=full_table_name, @@ -803,7 +805,8 @@ def rename_column(self, full_table_name: str, old_name: str, new_name: str) -> N NotImplementedError: If `self.allow_column_rename` is false. """ if not self.allow_column_rename: - raise NotImplementedError("Renaming columns is not supported.") + msg = "Renaming columns is not supported." + raise NotImplementedError(msg) column_rename_ddl = self.get_column_rename_ddl( table_name=full_table_name, @@ -829,7 +832,8 @@ def merge_sql_types( ValueError: If sql_types argument has zero members. """ if not sql_types: - raise ValueError("Expected at least one member in `sql_types` argument.") + msg = "Expected at least one member in `sql_types` argument." + raise ValueError(msg) if len(sql_types) == 1: return sql_types[0] @@ -875,9 +879,8 @@ def merge_sql_types( elif str(opt) == str(current_type): return opt - raise ValueError( - f"Unable to merge sql types: {', '.join([str(t) for t in sql_types])}", - ) + msg = f"Unable to merge sql types: {', '.join([str(t) for t in sql_types])}" + raise ValueError(msg) def _sort_types( self, @@ -940,9 +943,8 @@ def _get_column_type( try: column = self.get_table_columns(full_table_name)[column_name] except KeyError as ex: - raise KeyError( - f"Column `{column_name}` does not exist in table `{full_table_name}`.", - ) from ex + msg = f"Column `{column_name}` does not exist in table `{full_table_name}`." + raise KeyError(msg) from ex return t.cast(sqlalchemy.types.TypeEngine, column.type) @@ -1108,11 +1110,12 @@ def _adapt_column_type( self.update_collation(compatible_sql_type, current_type_collation) if not self.allow_column_alter: - raise NotImplementedError( - "Altering columns is not supported. " - f"Could not convert column '{full_table_name}.{column_name}' " - f"from '{current_type}' to '{compatible_sql_type}'.", + msg = ( + "Altering columns is not supported. Could not convert column " + f"'{full_table_name}.{column_name}' from '{current_type}' to " + f"'{compatible_sql_type}'." ) + raise NotImplementedError(msg) alter_column_ddl = self.get_column_alter_ddl( table_name=full_table_name, diff --git a/singer_sdk/helpers/_catalog.py b/singer_sdk/helpers/_catalog.py index 9e6fdc639..2f0dbdcd1 100644 --- a/singer_sdk/helpers/_catalog.py +++ b/singer_sdk/helpers/_catalog.py @@ -47,11 +47,12 @@ def _pop_deselected_schema( schema_at_breadcrumb = schema_at_breadcrumb.get(crumb, {}) if not isinstance(schema_at_breadcrumb, dict): - raise ValueError( - f"Expected dictionary type instead of " - f"'{type(schema_at_breadcrumb).__name__}' '{schema_at_breadcrumb}' " - f"for '{stream_name}' bookmark '{str(breadcrumb)}' in '{schema}'", + msg = ( + "Expected dictionary type instead of " + f"'{type(schema_at_breadcrumb).__name__}' '{schema_at_breadcrumb}' for " + f"'{stream_name}' bookmark '{str(breadcrumb)}' in '{schema}'" ) + raise ValueError(msg) if "properties" not in schema_at_breadcrumb: return @@ -128,14 +129,16 @@ def set_catalog_stream_selected( """ breadcrumb = breadcrumb or () if not isinstance(breadcrumb, tuple): - raise ValueError( - f"Expected tuple value for breadcrumb '{breadcrumb}'. " - f"Got {type(breadcrumb).__name__}", + msg = ( + f"Expected tuple value for breadcrumb '{breadcrumb}'. Got " + f"{type(breadcrumb).__name__}" ) + raise ValueError(msg) catalog_entry = catalog.get_stream(stream_name) if not catalog_entry: - raise ValueError(f"Catalog entry missing for '{stream_name}'. 
Skipping.") + msg = f"Catalog entry missing for '{stream_name}'. Skipping." + raise ValueError(msg) md_entry = catalog_entry.metadata[breadcrumb] md_entry.selected = selected diff --git a/singer_sdk/helpers/_flattening.py b/singer_sdk/helpers/_flattening.py index f4c716ec4..972ee645f 100644 --- a/singer_sdk/helpers/_flattening.py +++ b/singer_sdk/helpers/_flattening.py @@ -269,7 +269,8 @@ def _key_func(item): sorted_items = sorted(items, key=_key_func) for k, g in itertools.groupby(sorted_items, key=_key_func): if len(list(g)) > 1: - raise ValueError(f"Duplicate column name produced in schema: {k}") + msg = f"Duplicate column name produced in schema: {k}" + raise ValueError(msg) # Return the (unsorted) result as a dict. return dict(items) diff --git a/singer_sdk/helpers/_state.py b/singer_sdk/helpers/_state.py index f0eb9f745..6144bebea 100644 --- a/singer_sdk/helpers/_state.py +++ b/singer_sdk/helpers/_state.py @@ -79,11 +79,11 @@ def _find_in_partitions_list( if partition_state["context"] == state_partition_context ] if len(found) > 1: - raise ValueError( - f"State file contains duplicate entries for partition: " - "{state_partition_context}.\n" - f"Matching state values were: {str(found)}", + msg = ( + "State file contains duplicate entries for partition: " + f"{{state_partition_context}}.\nMatching state values were: {str(found)}" ) + raise ValueError(msg) if found: return t.cast(dict, found[0]) @@ -120,7 +120,8 @@ def get_writeable_state_dict( ValueError: Raise an error if duplicate entries are found. """ if tap_state is None: - raise ValueError("Cannot write state to missing state dictionary.") + msg = "Cannot write state to missing state dictionary." + raise ValueError(msg) if "bookmarks" not in tap_state: tap_state["bookmarks"] = {} @@ -220,10 +221,11 @@ def increment_state( return if is_sorted: - raise InvalidStreamSortException( + msg = ( f"Unsorted data detected in stream. Latest value '{new_rk_value}' is " - f"smaller than previous max '{old_rk_value}'.", + f"smaller than previous max '{old_rk_value}'." ) + raise InvalidStreamSortException(msg) def _greater_than_signpost( diff --git a/singer_sdk/helpers/_typing.py b/singer_sdk/helpers/_typing.py index 89364f42d..189c29564 100644 --- a/singer_sdk/helpers/_typing.py +++ b/singer_sdk/helpers/_typing.py @@ -28,6 +28,17 @@ class DatetimeErrorTreatmentEnum(Enum): NULL = "null" +class EmptySchemaTypeError(Exception): + """Exception for when trying to detect type from empty type_dict.""" + + def __init__(self, *args: object) -> None: + msg = ( + "Could not detect type from empty type_dict. Did you forget to define a " + "property in the stream schema?" + ) + super().__init__(msg, *args) + + def to_json_compatible(val: t.Any) -> t.Any: """Return as string if datetime. JSON does not support proper datetime types. @@ -56,10 +67,11 @@ def append_type(type_dict: dict, new_type: str) -> dict: result["type"] = [*type_array, new_type] return result - raise ValueError( + msg = ( "Could not append type because the JSON schema for the dictionary " - f"`{type_dict}` appears to be invalid.", + f"`{type_dict}` appears to be invalid." ) + raise ValueError(msg) def is_secret_type(type_dict: dict) -> bool: @@ -122,17 +134,13 @@ def is_datetime_type(type_dict: dict) -> bool: Also returns True if 'date-time' is nested within an 'anyOf' type Array. """ if not type_dict: - raise ValueError( - "Could not detect type from empty type_dict. 
" - "Did you forget to define a property in the stream schema?", - ) + raise EmptySchemaTypeError if "anyOf" in type_dict: return any(is_datetime_type(type_dict) for type_dict in type_dict["anyOf"]) if "type" in type_dict: return type_dict.get("format") == "date-time" - raise ValueError( - f"Could not detect type of replication key using schema '{type_dict}'", - ) + msg = f"Could not detect type of replication key using schema '{type_dict}'" + raise ValueError(msg) def is_date_or_datetime_type(type_dict: dict) -> bool: @@ -155,9 +163,8 @@ def is_date_or_datetime_type(type_dict: dict) -> bool: if "type" in type_dict: return type_dict.get("format") in {"date", "date-time"} - raise ValueError( - f"Could not detect type of replication key using schema '{type_dict}'", - ) + msg = f"Could not detect type of replication key using schema '{type_dict}'" + raise ValueError(msg) def get_datelike_property_type(property_schema: dict) -> str | None: @@ -213,16 +220,14 @@ def handle_invalid_timestamp_in_record( def is_string_array_type(type_dict: dict) -> bool: """Return True if JSON Schema type definition is a string array.""" if not type_dict: - raise ValueError( - "Could not detect type from empty type_dict. " - "Did you forget to define a property in the stream schema?", - ) + raise EmptySchemaTypeError if "anyOf" in type_dict: return any(is_string_array_type(t) for t in type_dict["anyOf"]) if "type" not in type_dict: - raise ValueError(f"Could not detect type from schema '{type_dict}'") + msg = f"Could not detect type from schema '{type_dict}'" + raise ValueError(msg) return "array" in type_dict["type"] and bool(is_string_type(type_dict["items"])) @@ -230,16 +235,14 @@ def is_string_array_type(type_dict: dict) -> bool: def is_array_type(type_dict: dict) -> bool: """Return True if JSON Schema type is an array.""" if not type_dict: - raise ValueError( - "Could not detect type from empty type_dict. " - "Did you forget to define a property in the stream schema?", - ) + raise EmptySchemaTypeError if "anyOf" in type_dict: return any(is_array_type(t) for t in type_dict["anyOf"]) if "type" not in type_dict: - raise ValueError(f"Could not detect type from schema '{type_dict}'") + msg = f"Could not detect type from schema '{type_dict}'" + raise ValueError(msg) return "array" in type_dict["type"] diff --git a/singer_sdk/helpers/_util.py b/singer_sdk/helpers/_util.py index c4c09383c..0e1399043 100644 --- a/singer_sdk/helpers/_util.py +++ b/singer_sdk/helpers/_util.py @@ -12,7 +12,8 @@ def read_json_file(path: PurePath | str) -> dict[str, t.Any]: """Read json file, thowing an error if missing.""" if not path: - raise RuntimeError("Could not open file. Filepath not provided.") + msg = "Could not open file. Filepath not provided." + raise RuntimeError(msg) if not Path(path).exists(): msg = f"File at '{path}' was not found." diff --git a/singer_sdk/io_base.py b/singer_sdk/io_base.py index 9d3c1bb89..ef336ac18 100644 --- a/singer_sdk/io_base.py +++ b/singer_sdk/io_base.py @@ -46,9 +46,8 @@ def _assert_line_requires(line_dict: dict, requires: set[str]) -> None: """ if not requires.issubset(line_dict): missing = requires - set(line_dict) - raise Exception( - f"Line is missing required {', '.join(missing)} key(s): {line_dict}", - ) + msg = f"Line is missing required {', '.join(missing)} key(s): {line_dict}" + raise Exception(msg) def _process_lines(self, file_input: t.IO[str]) -> t.Counter[str]: """Internal method to process jsonl lines from a Singer tap. 
@@ -125,7 +124,8 @@ def _process_unknown_message(self, message_dict: dict) -> None: ValueError: raised if a message type is not recognized """ record_type = message_dict["type"] - raise ValueError(f"Unknown message type '{record_type}' in message.") + msg = f"Unknown message type '{record_type}' in message." + raise ValueError(msg) def _process_endofpipe(self) -> None: logger.debug("End of pipe reached") diff --git a/singer_sdk/mapper.py b/singer_sdk/mapper.py index 4fe794b68..5f8c5b36b 100644 --- a/singer_sdk/mapper.py +++ b/singer_sdk/mapper.py @@ -339,9 +339,8 @@ def _eval( names=names, ) except (simpleeval.InvalidExpression, SyntaxError) as ex: - raise MapExpressionError( - f"Failed to evaluate simpleeval expressions {expr}.", - ) from ex + msg = f"Failed to evaluate simpleeval expressions {expr}." + raise MapExpressionError(msg) from ex logging.debug("Eval result: %s = %s", expr, result) @@ -365,7 +364,8 @@ def _eval_type( ValueError: If the expression is ``None``. """ if expr is None: - raise ValueError("Expression should be str, not None") + msg = "Expression should be str, not None" + raise ValueError(msg) default = default or StringType() @@ -430,10 +430,11 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915 ) include_by_default = False else: - raise NotImplementedError( + msg = ( f"Option '{MAPPER_ELSE_OPTION}={stream_map[MAPPER_ELSE_OPTION]}' " - "is not supported.", + "is not supported." ) + raise NotImplementedError(msg) stream_map.pop(MAPPER_ELSE_OPTION) # Transform the schema as needed @@ -449,13 +450,14 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915 for prop_key, prop_def in list(stream_map.items()): if prop_def in {None, NULL_STRING}: if prop_key in (self.transformed_key_properties or []): - raise StreamMapConfigError( + msg = ( f"Removing key property '{prop_key}' is not permitted in " f"'{self.stream_alias}' stream map config. To remove a key " - "property, use the `__key_properties__` operator " - "to specify either a new list of key property names or `null` " - "to replicate with no key properties in the stream.", + "property, use the `__key_properties__` operator to specify " + "either a new list of key property names or `null` to " + "replicate with no key properties in the stream." ) + raise StreamMapConfigError(msg) transformed_schema["properties"].pop(prop_key, None) elif isinstance(prop_def, str): default_type: JSONTypeHelper = StringType() # Fallback to string @@ -476,18 +478,20 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915 ).to_dict(), ) else: - raise StreamMapConfigError( - f"Unexpected type '{type(prop_def).__name__}' in stream map " - f"for '{self.stream_alias}:{prop_key}'.", + msg = ( + f"Unexpected type '{type(prop_def).__name__}' in stream map for " + f"'{self.stream_alias}:{prop_key}'." ) + raise StreamMapConfigError(msg) for key_property in self.transformed_key_properties or []: if key_property not in transformed_schema["properties"]: - raise StreamMapConfigError( + msg = ( f"Invalid key properties for '{self.stream_alias}': " - f"[{','.join(self.transformed_key_properties)}]. " - f"Property '{key_property}' was not detected in schema.", + f"[{','.join(self.transformed_key_properties)}]. Property " + f"'{key_property}' was not detected in schema." 
) + raise StreamMapConfigError(msg) if self.flattening_enabled: transformed_schema = self.flatten_schema(transformed_schema) @@ -523,10 +527,11 @@ def always_true(record: dict) -> bool: elif filter_rule is None: filter_fn = always_true else: - raise StreamMapConfigError( + msg = ( f"Unexpected filter rule type '{type(filter_rule).__name__}' in " - f"expression {str(filter_rule)}. Expected 'str' or 'None'.", + f"expression {str(filter_rule)}. Expected 'str' or 'None'." ) + raise StreamMapConfigError(msg) def transform_fn(record: dict) -> dict | None: nonlocal include_by_default, stream_map @@ -558,10 +563,11 @@ def transform_fn(record: dict) -> dict | None: ) continue - raise StreamMapConfigError( - f"Unexpected mapping type '{type(prop_def).__name__}' in " - f"map expression '{prop_def}'. Expected 'str' or 'None'.", + msg = ( + f"Unexpected mapping type '{type(prop_def).__name__}' " + f"in map expression '{prop_def}'. Expected 'str' or 'None'." ) + raise StreamMapConfigError(msg) return result @@ -602,10 +608,11 @@ def __init__( self.default_mapper_type = RemoveRecordTransform self.stream_maps_dict.pop(MAPPER_ELSE_OPTION) else: - raise StreamMapConfigError( + msg = ( f"Undefined transform for '{MAPPER_ELSE_OPTION}' case: " - f"{self.stream_maps_dict[MAPPER_ELSE_OPTION]}", + f"{self.stream_maps_dict[MAPPER_ELSE_OPTION]}" ) + raise StreamMapConfigError(msg) else: logging.debug( "Operator '%s=None' was not found. " @@ -614,9 +621,8 @@ def __init__( ) for stream_map_key, stream_def in self.stream_maps_dict.items(): if stream_map_key.startswith("__"): - raise StreamMapConfigError( - f"Option '{stream_map_key}:{stream_def}' is not expected.", - ) + msg = f"Option '{stream_map_key}:{stream_def}' is not expected." + raise StreamMapConfigError(msg) def register_raw_streams_from_catalog(self, catalog: Catalog) -> None: """Register all streams as described in the catalog dict. @@ -689,9 +695,8 @@ def register_raw_stream_schema( # noqa: PLR0912 # TODO: Add any expected cases for str expressions (currently none) pass - raise StreamMapConfigError( - f"Option '{stream_map_key}:{stream_def}' is not expected.", - ) + msg = f"Option '{stream_map_key}:{stream_def}' is not expected." + raise StreamMapConfigError(msg) if stream_def is None or stream_def == NULL_STRING: if stream_name != stream_map_key: @@ -707,10 +712,11 @@ def register_raw_stream_schema( # noqa: PLR0912 continue if not isinstance(stream_def, dict): - raise StreamMapConfigError( - "Unexpected stream definition type. Expected str, dict, or None. " - f"Got '{type(stream_def).__name__}'.", + msg = ( + f"Unexpected stream definition type. Expected str, dict, or None. " + f"Got '{type(stream_def).__name__}'." ) + raise StreamMapConfigError(msg) if MAPPER_SOURCE_OPTION in stream_def: source_stream = stream_def.pop(MAPPER_SOURCE_OPTION) diff --git a/singer_sdk/mapper_base.py b/singer_sdk/mapper_base.py index 702963391..977da61ea 100644 --- a/singer_sdk/mapper_base.py +++ b/singer_sdk/mapper_base.py @@ -93,7 +93,7 @@ def map_activate_version_message( def map_batch_message( self, - message_dict: dict, + message_dict: dict, # noqa: ARG002 ) -> t.Iterable[singer.Message]: """Map a batch message to zero or more new messages. @@ -103,7 +103,8 @@ def map_batch_message( Raises: NotImplementedError: if not implemented by subclass. """ - raise NotImplementedError("BATCH messages are not supported by mappers.") + msg = "BATCH messages are not supported by mappers." 
+ raise NotImplementedError(msg) @classproperty def cli(cls) -> t.Callable: # noqa: N805 diff --git a/singer_sdk/pagination.py b/singer_sdk/pagination.py index f2d34a56c..f00bb0920 100644 --- a/singer_sdk/pagination.py +++ b/singer_sdk/pagination.py @@ -112,10 +112,11 @@ def advance(self, response: Response) -> None: new_value = self.get_next(response) if new_value and new_value == self._value: - raise RuntimeError( - f"Loop detected in pagination. " - f"Pagination token {new_value} is identical to prior token.", + msg = ( + f"Loop detected in pagination. Pagination token {new_value} is " + "identical to prior token." ) + raise RuntimeError(msg) # Stop if new value None, empty string, 0, etc. if not new_value: diff --git a/singer_sdk/plugin_base.py b/singer_sdk/plugin_base.py index d19be13e8..799b8e9d0 100644 --- a/singer_sdk/plugin_base.py +++ b/singer_sdk/plugin_base.py @@ -99,7 +99,8 @@ def __init__( elif isinstance(config, dict): config_dict = config else: - raise ValueError(f"Error parsing config of type '{type(config).__name__}'.") + msg = f"Error parsing config of type '{type(config).__name__}'." + raise ValueError(msg) if parse_env_config: self.logger.info("Parsing env var for settings config...") config_dict.update(self._env_var_config) @@ -283,9 +284,8 @@ def _validate_config( summary += f"\n{warning}" if warnings_as_errors and raise_errors and warnings: - raise ConfigValidationError( - f"One or more warnings ocurred during validation: {warnings}", - ) + msg = f"One or more warnings occurred during validation: {warnings}" + raise ConfigValidationError(msg) log_fn(summary) return warnings, errors @@ -392,10 +392,11 @@ def config_from_cli_args(*args: str) -> tuple[list[Path], bool]: # Validate config file paths before adding to list if not Path(config_path).is_file(): - raise FileNotFoundError( - f"Could not locate config file at '{config_path}'." - "Please check that the file exists.", + msg = ( + f"Could not locate config file at '{config_path}'. Please check " + "that the file exists." ) + raise FileNotFoundError(msg) config_files.append(Path(config_path)) diff --git a/singer_sdk/sinks/core.py b/singer_sdk/sinks/core.py index 69be8be61..91b750c81 100644 --- a/singer_sdk/sinks/core.py +++ b/singer_sdk/sinks/core.py @@ -420,7 +420,8 @@ def process_batch(self, context: dict) -> None: Raises: NotImplementedError: If derived class does not override this method. """ - raise NotImplementedError("No handling exists for process_batch().") + msg = "No handling exists for process_batch()." 
+ raise NotImplementedError(msg) def mark_drained(self) -> None: """Reset `records_to_drain` and any other tracking.""" @@ -506,6 +507,5 @@ def process_batch_files( } self.process_batch(context) else: - raise NotImplementedError( - f"Unsupported batch encoding format: {encoding.format}", - ) + msg = f"Unsupported batch encoding format: {encoding.format}" + raise NotImplementedError(msg) diff --git a/singer_sdk/sinks/sql.py b/singer_sdk/sinks/sql.py index ebe94a512..07391ff49 100644 --- a/singer_sdk/sinks/sql.py +++ b/singer_sdk/sinks/sql.py @@ -190,10 +190,11 @@ def _check_conformed_names_not_duplicated( # filter duplicates = list(filter(lambda p: len(p[1]) > 1, grouped.items())) if duplicates: - raise ConformedNameClashException( - "Duplicate stream properties produced when " - f"conforming property names: {duplicates}", + msg = ( + "Duplicate stream properties produced when conforming property names: " + f"{duplicates}" ) + raise ConformedNameClashException(msg) def conform_schema(self, schema: dict) -> dict: """Return schema dictionary with property names conformed. diff --git a/singer_sdk/streams/core.py b/singer_sdk/streams/core.py index b380f2eb8..9da638e92 100644 --- a/singer_sdk/streams/core.py +++ b/singer_sdk/streams/core.py @@ -148,7 +148,8 @@ def __init__( if name: self.name: str = name if not self.name: - raise ValueError("Missing argument or class variable 'name'.") + msg = "Missing argument or class variable 'name'." + raise ValueError(msg) self.logger: logging.Logger = tap.logger self.metrics_logger = tap.metrics_logger @@ -171,9 +172,8 @@ def __init__( if schema: if isinstance(schema, (PathLike, str)): if not Path(schema).is_file(): - raise FileNotFoundError( - f"Could not find schema file '{self.schema_filepath}'.", - ) + msg = f"Could not find schema file '{self.schema_filepath}'." + raise FileNotFoundError(msg) self._schema_filepath = Path(schema) elif isinstance(schema, dict): @@ -181,18 +181,18 @@ def __init__( elif isinstance(schema, singer.Schema): self._schema = schema.to_dict() else: - raise ValueError( - f"Unexpected type {type(schema).__name__} for arg 'schema'.", - ) + msg = f"Unexpected type {type(schema).__name__} for arg 'schema'." + raise ValueError(msg) if self.schema_filepath: self._schema = json.loads(Path(self.schema_filepath).read_text()) if not self.schema: - raise ValueError( - f"Could not initialize schema for stream '{self.name}'. " - "A valid schema object or filepath was not provided.", + msg = ( + f"Could not initialize schema for stream '{self.name}'. A valid schema " + "object or filepath was not provided." 
) + raise ValueError(msg) @property def stream_maps(self) -> list[StreamMap]: @@ -289,9 +289,8 @@ def get_starting_timestamp(self, context: dict | None) -> datetime.datetime | No return None if not self.is_timestamp_replication_key: - raise ValueError( - f"The replication key {self.replication_key} is not of timestamp type", - ) + msg = f"The replication key {self.replication_key} is not of timestamp type" + raise ValueError(msg) return t.cast(datetime.datetime, pendulum.parse(value)) @@ -757,10 +756,11 @@ def _increment_stream_state( # Advance state bookmark values if applicable if latest_record and self.replication_method == REPLICATION_INCREMENTAL: if not self.replication_key: - raise ValueError( - f"Could not detect replication key for '{self.name}' stream" - f"(replication method={self.replication_method})", + msg = ( + f"Could not detect replication key for '{self.name}' " + f"stream (replication method={self.replication_method})" ) + raise ValueError(msg) treat_as_sorted = self.is_sorted if not treat_as_sorted and self.state_partitioning_keys is not None: # Streams with custom state partitioning are not resumable. @@ -957,14 +957,12 @@ def _abort_sync(self, abort_reason: Exception) -> None: self._write_state_message() # Write out state message if pending. if self.replication_method == "FULL_TABLE": - raise AbortedSyncFailedException( - "Sync operation aborted for stream in 'FULL_TABLE' replication mode.", - ) from abort_reason + msg = "Sync operation aborted for stream in 'FULL_TABLE' replication mode." + raise AbortedSyncFailedException(msg) from abort_reason if is_state_non_resumable(self.stream_state): - raise AbortedSyncFailedException( - "Sync operation aborted and state is not in a resumable state.", - ) from abort_reason + msg = "Sync operation aborted and state is not in a resumable state." + raise AbortedSyncFailedException(msg) from abort_reason # Else, the sync operation can be assumed to be in a valid resumable state. raise AbortedSyncPausedException from abort_reason @@ -1267,13 +1265,14 @@ def get_child_context(self, record: dict, context: dict | None) -> dict | None: if child_stream.state_partitioning_keys is None: parent_type = type(self).__name__ child_type = type(child_stream).__name__ - raise NotImplementedError( + msg = ( "No child context behavior was defined between parent stream " - f"'{self.name}' and child stream '{child_stream.name}'." - "The parent stream must define " + f"'{self.name}' and child stream '{child_stream.name}'. " + f"The parent stream must define " f"`{parent_type}.get_child_context()` and/or the child stream " - f"must define `{child_type}.state_partitioning_keys`.", + f"must define `{child_type}.state_partitioning_keys`." ) + raise NotImplementedError(msg) return context or record diff --git a/singer_sdk/streams/graphql.py b/singer_sdk/streams/graphql.py index 8f90d3f90..01e5d41ee 100644 --- a/singer_sdk/streams/graphql.py +++ b/singer_sdk/streams/graphql.py @@ -37,7 +37,8 @@ def query(self) -> str: Raises: NotImplementedError: If the derived class doesn't define this property. """ - raise NotImplementedError("GraphQLStream `query` is not defined.") + msg = "GraphQLStream `query` is not defined." + raise NotImplementedError(msg) def prepare_request_payload( self, @@ -65,7 +66,8 @@ def prepare_request_payload( query = self.query if query is None: - raise ValueError("Graphql `query` property not set.") + msg = "Graphql `query` property not set." 
+ raise ValueError(msg) if not query.lstrip().startswith("query"): # Wrap text in "query { }" if not already wrapped diff --git a/singer_sdk/streams/sql.py b/singer_sdk/streams/sql.py index e7155ebe8..4d7600f22 100644 --- a/singer_sdk/streams/sql.py +++ b/singer_sdk/streams/sql.py @@ -129,9 +129,8 @@ def fully_qualified_name(self) -> str: """ catalog_entry = self._singer_catalog_entry if not catalog_entry.table: - raise ValueError( - f"Missing table name in catalog entry: {catalog_entry.to_dict()}", - ) + msg = f"Missing table name in catalog entry: {catalog_entry.to_dict()}" + raise ValueError(msg) return self.connector.get_fully_qualified_name( table_name=catalog_entry.table, @@ -173,9 +172,8 @@ def get_records(self, context: dict | None) -> t.Iterable[dict[str, t.Any]]: not support partitioning. """ if context: - raise NotImplementedError( - f"Stream '{self.name}' does not support partitioning.", - ) + msg = f"Stream '{self.name}' does not support partitioning." + raise NotImplementedError(msg) selected_column_names = self.get_selected_schema()["properties"].keys() table = self.connector.get_table( diff --git a/singer_sdk/tap_base.py b/singer_sdk/tap_base.py index f2f24bd70..791ff1a0c 100644 --- a/singer_sdk/tap_base.py +++ b/singer_sdk/tap_base.py @@ -142,7 +142,8 @@ def state(self) -> dict: RuntimeError: If state has not been initialized. """ if self._state is None: - raise RuntimeError("Could not read from uninitialized state.") + msg = "Could not read from uninitialized state." + raise RuntimeError(msg) return self._state @property @@ -296,10 +297,11 @@ def discover_streams(self) -> t.Sequence[Stream]: NotImplementedError: If the tap implementation does not override this method. """ - raise NotImplementedError( - f"Tap '{self.name}' does not support discovery. " - "Please set the '--catalog' command line argument and try again.", + msg = ( + f"Tap '{self.name}' does not support discovery. Please set the '--catalog' " + "command line argument and try again." ) + raise NotImplementedError(msg) @final def load_streams(self) -> list[Stream]: @@ -358,7 +360,8 @@ def load_state(self, state: dict[str, t.Any]) -> None: initialized. """ if self.state is None: - raise ValueError("Cannot write to uninitialized state dictionary.") + msg = "Cannot write to uninitialized state dictionary." + raise ValueError(msg) for stream_name, stream_state in state.get("bookmarks", {}).items(): for key, val in stream_state.items(): diff --git a/singer_sdk/target_base.py b/singer_sdk/target_base.py index 1d8d97f4f..d44305d67 100644 --- a/singer_sdk/target_base.py +++ b/singer_sdk/target_base.py @@ -201,10 +201,11 @@ def get_sink_class(self, stream_name: str) -> type[Sink]: if self.default_sink_class: return self.default_sink_class - raise ValueError( - f"No sink class defined for '{stream_name}' " - "and no default sink class available.", + msg = ( + f"No sink class defined for '{stream_name}' and no default sink class " + "available." ) + raise ValueError(msg) def sink_exists(self, stream_name: str) -> bool: """Check sink for a stream. @@ -261,10 +262,11 @@ def _assert_sink_exists(self, stream_name: str) -> None: is not sent. """ if not self.sink_exists(stream_name): - raise RecordsWithoutSchemaException( + msg = ( f"A record for stream '{stream_name}' was encountered before a " - "corresponding schema.", + "corresponding schema." 
) + raise RecordsWithoutSchemaException(msg) # Message handling diff --git a/singer_sdk/testing/tap_tests.py b/singer_sdk/testing/tap_tests.py index acdcd3ca1..c05211812 100644 --- a/singer_sdk/testing/tap_tests.py +++ b/singer_sdk/testing/tap_tests.py @@ -124,7 +124,8 @@ def test(self) -> None: (r[k] for k in primary_keys or []) for r in self.stream_records ] except KeyError as e: - raise AssertionError(f"Record missing primary key: {str(e)}") from e + msg = f"Record missing primary key: {str(e)}" + raise AssertionError(msg) from e count_unique_records = len(set(record_ids)) count_records = len(self.stream_records) assert count_unique_records == count_records, ( diff --git a/singer_sdk/testing/templates.py b/singer_sdk/testing/templates.py index dc25aaf6e..b00452520 100644 --- a/singer_sdk/testing/templates.py +++ b/singer_sdk/testing/templates.py @@ -41,7 +41,8 @@ def id(self) -> str: # noqa: A003 Raises: NotImplementedError: if not implemented. """ - raise NotImplementedError("ID not implemented.") + msg = "ID not implemented." + raise NotImplementedError(msg) def setup(self) -> None: """Test setup, called before `.test()`. @@ -52,7 +53,8 @@ def setup(self) -> None: Raises: NotImplementedError: if not implemented. """ - raise NotImplementedError("Setup method not implemented.") + msg = "Setup method not implemented." + raise NotImplementedError(msg) def test(self) -> None: """Main Test body, called after `.setup()` and before `.validate()`.""" @@ -67,7 +69,8 @@ def validate(self) -> None: Raises: NotImplementedError: if not implemented. """ - raise NotImplementedError("Method not implemented.") + msg = "Method not implemented." + raise NotImplementedError(msg) def teardown(self) -> None: """Test Teardown. @@ -78,7 +81,8 @@ def teardown(self) -> None: Raises: NotImplementedError: if not implemented. """ - raise NotImplementedError("Method not implemented.") + msg = "Method not implemented." + raise NotImplementedError(msg) def run( self, @@ -97,7 +101,8 @@ def run( ValueError: if Test instance does not have `name` and `type` properties. """ if not self.name or not self.plugin_type: - raise ValueError("Test must have 'name' and 'type' properties.") + msg = "Test must have 'name' and 'type' properties." + raise ValueError(msg) self.config = config self.resource = resource @@ -241,9 +246,9 @@ def non_null_attribute_values(self) -> list[t.Any]: @classmethod def evaluate( cls, - stream: Stream, - property_name: str, - property_schema: dict, + stream: Stream, # noqa: ARG003 + property_name: str, # noqa: ARG003 + property_schema: dict, # noqa: ARG003 ) -> bool: """Determine if this attribute test is applicable to the given property. @@ -255,10 +260,11 @@ def evaluate( Raises: NotImplementedError: if not implemented. """ - raise NotImplementedError( - "The 'evaluate' method is required for attribute tests, " - "but not implemented.", + msg = ( + "The 'evaluate' method is required for attribute tests, but not " + "implemented." ) + raise NotImplementedError(msg) class TargetTestTemplate(TestTemplate): diff --git a/singer_sdk/typing.py b/singer_sdk/typing.py index 2459b7230..c30874b34 100644 --- a/singer_sdk/typing.py +++ b/singer_sdk/typing.py @@ -547,11 +547,11 @@ def type_dict(self) -> dict: # type: ignore[override] wrapped = self.wrapped if isinstance(wrapped, type) and not isinstance(wrapped.type_dict, t.Mapping): - raise ValueError( - f"Type dict for {wrapped} is not defined. 
" - "Try instantiating it with a nested type such as " - f"{wrapped.__name__}(StringType).", + msg = ( + f"Type dict for {wrapped} is not defined. Try instantiating it with a " + f"nested type such as {wrapped.__name__}(StringType)." ) + raise ValueError(msg) return t.cast(dict, wrapped.type_dict) @@ -795,7 +795,8 @@ def to_jsonschema_type( ): type_name = from_type.__name__ else: - raise ValueError("Expected `str` or a SQLAlchemy `TypeEngine` object or type.") + msg = "Expected `str` or a SQLAlchemy `TypeEngine` object or type." + raise ValueError(msg) # Look for the type name within the known SQL type names: for sqltype, jsonschema_type in sqltype_lookup.items(): diff --git a/tests/core/rest/test_backoff.py b/tests/core/rest/test_backoff.py index b45366a3b..217fa32c3 100644 --- a/tests/core/rest/test_backoff.py +++ b/tests/core/rest/test_backoff.py @@ -35,9 +35,11 @@ def validate_response(self, response: requests.Response): super().validate_response(response) data = response.json() if data["status"] == self.StatusMessage.ERROR: - raise FatalAPIError("Error message found :(") + msg = "Error message found :(" + raise FatalAPIError(msg) if data["status"] == self.StatusMessage.UNAVAILABLE: - raise RetriableAPIError("API is unavailable") + msg = "API is unavailable" + raise RetriableAPIError(msg) @pytest.fixture diff --git a/tests/samples/test_target_sqlite.py b/tests/samples/test_target_sqlite.py index 719785b01..a7ca3b3c5 100644 --- a/tests/samples/test_target_sqlite.py +++ b/tests/samples/test_target_sqlite.py @@ -104,16 +104,14 @@ def test_sync_sqlite_to_sqlite( try: orig_json = json.loads(orig_out) except json.JSONDecodeError as e: - raise RuntimeError( - f"Could not parse JSON in orig line {line_num}: {orig_out}", - ) from e + msg = f"Could not parse JSON in orig line {line_num}: {orig_out}" + raise RuntimeError(msg) from e try: tapped_json = json.loads(new_out) except json.JSONDecodeError as e: - raise RuntimeError( - f"Could not parse JSON in new line {line_num}: {new_out}", - ) from e + msg = f"Could not parse JSON in new line {line_num}: {new_out}" + raise RuntimeError(msg) from e assert ( tapped_json["type"] == orig_json["type"] From dc307b6bb49fe1fe8875bc5f4e7a96c702d952ce Mon Sep 17 00:00:00 2001 From: "Edgar R. 
M" Date: Tue, 2 May 2023 13:52:16 -0600 Subject: [PATCH 4/4] feat(templates): Use Ruff to lint projects generated with Cookiecutter templates (#1648) * feat(templates): Use Ruff to lint Cookiecutter templates * Address import order issue in target template * Add end-to-end test for SQL target --------- Co-authored-by: Will Da Silva --- .github/workflows/constraints.txt | 1 + .github/workflows/cookiecutter-e2e.yml | 18 +++-- .pre-commit-config.yaml | 3 +- .../.pre-commit-config.yaml | 36 +++++++++ .../{{cookiecutter.tap_id}}/pyproject.toml | 36 +++++---- .../tests/{{ 'test' }}_core.py | 5 +- .../{{cookiecutter.tap_id}}/tox.ini | 34 +------- .../{{cookiecutter.library_name}}/tap.py | 6 +- ...kiecutter.stream_type %}client.py{%endif%} | 25 +++--- ...kiecutter.stream_type %}client.py{%endif%} | 12 ++- ...kiecutter.stream_type %}client.py{%endif%} | 36 +++++---- ...iecutter.stream_type %}streams.py{%endif%} | 4 +- ...kiecutter.stream_type %}client.py{%endif%} | 6 +- ...hod in ('OAuth2', 'JWT')%}auth.py{%endif%} | 19 +++-- .../.pre-commit-config.yaml | 36 +++++++++ .../{{cookiecutter.target_id}}/pyproject.toml | 30 ++++--- .../tests/{{ 'test' }}_core.py | 20 ++--- .../{{cookiecutter.target_id}}/tox.ini | 37 +-------- .../{{cookiecutter.library_name}}/__init__.py | 1 + .../{{cookiecutter.library_name}}/sinks.py | 13 ++- .../{{cookiecutter.library_name}}/target.py | 8 +- e2e-tests/cookiecutters/target-sql.json | 12 +++ noxfile.py | 16 +--- tests/cookiecutters/__init__.py | 0 tests/cookiecutters/test_cookiecutter.py | 81 ------------------- 25 files changed, 226 insertions(+), 269 deletions(-) create mode 100644 cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml create mode 100644 cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml create mode 100644 e2e-tests/cookiecutters/target-sql.json delete mode 100644 tests/cookiecutters/__init__.py delete mode 100644 tests/cookiecutters/test_cookiecutter.py diff --git a/.github/workflows/constraints.txt b/.github/workflows/constraints.txt index a689ece84..e25bb6599 100644 --- a/.github/workflows/constraints.txt +++ b/.github/workflows/constraints.txt @@ -1,4 +1,5 @@ pip==23.1.2 poetry==1.4.2 +pre-commit==3.2.2 nox==2023.4.22 nox-poetry==1.0.2 diff --git a/.github/workflows/cookiecutter-e2e.yml b/.github/workflows/cookiecutter-e2e.yml index 346bcaeea..447bb59aa 100644 --- a/.github/workflows/cookiecutter-e2e.yml +++ b/.github/workflows/cookiecutter-e2e.yml @@ -30,13 +30,18 @@ jobs: - name: Check out the repository uses: actions/checkout@v3.5.2 - - name: Install Poetry & Tox + - name: Upgrade pip env: PIP_CONSTRAINT: .github/workflows/constraints.txt + run: | + pip install pip + pip --version + + - name: Install Poetry run: | pipx install poetry poetry --version - pipx install tox + - name: Setup Python ${{ matrix.python-version }} uses: actions/setup-python@v4.6.0 with: @@ -45,12 +50,10 @@ jobs: cache: 'pip' cache-dependency-path: 'poetry.lock' - - name: Upgrade pip - env: - PIP_CONSTRAINT: .github/workflows/constraints.txt + - name: Install pre-commit run: | - pip install pip - pip --version + pipx install pre-commit + pre-commit --version - name: Install Nox env: @@ -63,6 +66,7 @@ jobs: - name: Run Nox run: | nox --python=${{ matrix.python-version }} --session=test_cookiecutter + - name: Upload build artifacts if: always() uses: actions/upload-artifact@v3 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8c28fc5ac..54cf76f30 100644 --- a/.pre-commit-config.yaml +++ 
b/.pre-commit-config.yaml @@ -16,7 +16,8 @@ repos: - id: check-yaml exclude: | (?x)^( - cookiecutter/.*/meltano.yml + cookiecutter/.*/meltano.yml| + cookiecutter/.*/.pre-commit-config.yaml )$ - id: end-of-file-fixer exclude: | diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml new file mode 100644 index 000000000..9a9725065 --- /dev/null +++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml @@ -0,0 +1,36 @@ +ci: + autofix_prs: true + autoupdate_schedule: weekly + autoupdate_commit_msg: 'chore: pre-commit autoupdate' + +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-json + - id: check-toml + - id: check-yaml + - id: end-of-file-fixer + - id: trailing-whitespace + +- repo: https://github.com/charliermarsh/ruff-pre-commit + rev: v0.0.263 + hooks: + - id: ruff + args: [--fix, --exit-non-zero-on-fix] + +- repo: https://github.com/psf/black + rev: 23.3.0 + hooks: + - id: black + +- repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.2.0 + hooks: + - id: mypy + additional_dependencies: + {%- if cookiecutter.stream_type == "SQL" %} + - sqlalchemy-stubs + {%- else %} + - types-requests + {%- endif %} diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml index be4c2aa70..d30f23a03 100644 --- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml +++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml @@ -32,28 +32,11 @@ cached-property = "^1" # Remove after Python 3.7 support is dropped [tool.poetry.group.dev.dependencies] pytest = "^7.2.1" -flake8 = "^5.0.4" -darglint = "^1.8.1" -black = "^23.1.0" -pyupgrade = "^3.3.1" -mypy = "^1.0.0" -isort = "^5.11.5" singer-sdk = { version="^0.26.0", extras = ["testing"] } -{%- if cookiecutter.stream_type in ["REST", "GraphQL"] %} -types-requests = "^2.28.11.12" -{%- endif %} -{%- if cookiecutter.stream_type == 'SQL' %} -sqlalchemy-stubs = "^0.4" -{%- endif %} [tool.poetry.extras] s3 = ["fs-s3fs"] -[tool.isort] -profile = "black" -multi_line_output = 3 # Vertical Hanging Indent -src_paths = "{{cookiecutter.library_name}}" - [tool.mypy] python_version = "3.9" warn_unused_configs = true @@ -61,6 +44,25 @@ warn_unused_configs = true plugins = "sqlmypy" {%- endif %} +[tool.ruff] +ignore = [ + "ANN101", # missing-type-self + "ANN102", # missing-type-cls +] +select = ["ALL"] +src = ["{{cookiecutter.library_name}}"] +target-version = "py37" + + +[tool.ruff.flake8-annotations] +allow-star-arg-any = true + +[tool.ruff.isort] +known-first-party = ["{{cookiecutter.library_name}}"] + +[tool.ruff.pydocstyle] +convention = "google" + [build-system] requires = ["poetry-core>=1.0.8"] build-backend = "poetry.core.masonry.api" diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tests/{{ 'test' }}_core.py b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tests/{{ 'test' }}_core.py index 831fdde2b..26eef158d 100644 --- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tests/{{ 'test' }}_core.py +++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tests/{{ 'test' }}_core.py @@ -6,9 +6,8 @@ from {{ cookiecutter.library_name }}.tap import Tap{{ cookiecutter.source_name }} - SAMPLE_CONFIG = { - "start_date": datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d") + "start_date": datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d"), # 
TODO: Initialize minimal tap config } @@ -16,7 +15,7 @@ # Run standard built-in tap tests from the SDK: TestTap{{ cookiecutter.source_name }} = get_tap_test_class( tap_class=Tap{{ cookiecutter.source_name }}, - config=SAMPLE_CONFIG + config=SAMPLE_CONFIG, ) diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tox.ini b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tox.ini index 51b8fb921..70b9e4ac7 100644 --- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tox.ini +++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tox.ini @@ -1,7 +1,7 @@ # This file can be used to customize tox tests as well as other test frameworks like flake8 and mypy [tox] -envlist = py39 +envlist = py37, py38, py39, py310, py311 isolated_build = true [testenv] @@ -9,10 +9,6 @@ allowlist_externals = poetry commands = poetry install -v poetry run pytest - poetry run black --check {{cookiecutter.library_name}}/ - poetry run flake8 {{cookiecutter.library_name}} - poetry run pydocstyle {{cookiecutter.library_name}} - poetry run mypy {{cookiecutter.library_name}} --exclude='{{cookiecutter.library_name}}/tests' [testenv:pytest] # Run the python tests. @@ -21,31 +17,3 @@ envlist = py37, py38, py39, py310, py311 commands = poetry install -v poetry run pytest - -[testenv:format] -# Attempt to auto-resolve lint errors before they are raised. -# To execute, run `tox -e format` -commands = - poetry install -v - poetry run black {{cookiecutter.library_name}}/ - poetry run isort {{cookiecutter.library_name}} - -[testenv:lint] -# Raise an error if lint and style standards are not met. -# To execute, run `tox -e lint` -commands = - poetry install -v - poetry run black --check --diff {{cookiecutter.library_name}}/ - poetry run isort --check {{cookiecutter.library_name}} - poetry run flake8 {{cookiecutter.library_name}} - # refer to mypy.ini for specific settings - poetry run mypy . 
--exclude='tests' - -[flake8] -docstring-convention = google -ignore = W503 -max-line-length = 88 -max-complexity = 10 - -[pydocstyle] -ignore = D105,D203,D213 diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/tap.py b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/tap.py index 25f2e73d2..df3f9f754 100644 --- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/tap.py +++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/tap.py @@ -7,7 +7,7 @@ {%- if cookiecutter.stream_type == "SQL" %} -# from {{ cookiecutter.library_name }}.client import {{ cookiecutter.source_name }}Stream +from {{ cookiecutter.library_name }}.client import {{ cookiecutter.source_name }}Stream {%- else %} # TODO: Import your custom stream types here: @@ -20,6 +20,10 @@ class Tap{{ cookiecutter.source_name }}({{ 'SQL' if cookiecutter.stream_type == name = "{{ cookiecutter.tap_id }}" + {%- if cookiecutter.stream_type == "SQL" %} + default_stream_class = {{ cookiecutter.source_name }}Stream + {%- endif %} + # TODO: Update this section with the actual config values you expect: config_jsonschema = th.PropertiesList( th.Property( diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'GraphQL' == cookiecutter.stream_type %}client.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'GraphQL' == cookiecutter.stream_type %}client.py{%endif%} index 32c59e832..66505556d 100644 --- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'GraphQL' == cookiecutter.stream_type %}client.py{%endif%} +++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'GraphQL' == cookiecutter.stream_type %}client.py{%endif%} @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Iterable -import requests +import requests # noqa: TCH002 from singer_sdk.streams import {{ cookiecutter.stream_type }}Stream {%- if cookiecutter.auth_method in ("OAuth2", "JWT") %} @@ -16,20 +16,14 @@ from {{ cookiecutter.library_name }}.auth import {{ cookiecutter.source_name }}A class {{ cookiecutter.source_name }}Stream({{ cookiecutter.stream_type }}Stream): """{{ cookiecutter.source_name }} stream class.""" - # TODO: Set the API's base URL here: @property def url_base(self) -> str: - """Return the API URL root, configurable via tap settings. - - Returns: - The base URL for all requests. - """ - return self.config["api_url"] - - # Alternatively, use a static string for url_base: - # url_base = "https://api.mysample.com" + """Return the API URL root, configurable via tap settings.""" + # TODO: hardcode a value here, or retrieve it from self.config + return "https://api.mysample.com" {%- if cookiecutter.auth_method in ("OAuth2", "JWT") %} + @property def authenticator(self) -> {{ cookiecutter.source_name }}Authenticator: """Return a new authenticator object. @@ -68,10 +62,13 @@ class {{ cookiecutter.source_name }}Stream({{ cookiecutter.stream_type }}Stream) """ # TODO: Parse response body and return a set of records. 
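As a sketch of how a generated tap might eventually resolve this TODO (the "data" and "users" keys below are hypothetical and depend entirely on the GraphQL query being made, so this is illustrative only):

    def parse_response(self, response: requests.Response) -> Iterable[dict]:
        """Parse the response and return an iterator of result records."""
        resp_json = response.json()
        # Hypothetical: GraphQL nests records under the query name inside "data".
        yield from resp_json.get("data", {}).get("users", [])
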
resp_json = response.json() - for record in resp_json.get(""): - yield record + yield from resp_json.get("") - def post_process(self, row: dict, context: dict | None = None) -> dict | None: + def post_process( + self, + row: dict, + context: dict | None = None, # noqa: ARG002 + ) -> dict | None: """As needed, append or transform raw data to match expected structure. Args: diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'Other' == cookiecutter.stream_type %}client.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'Other' == cookiecutter.stream_type %}client.py{%endif%} index cec97fdfc..c2def6322 100644 --- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'Other' == cookiecutter.stream_type %}client.py{%endif%} +++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'Other' == cookiecutter.stream_type %}client.py{%endif%} @@ -10,7 +10,10 @@ from singer_sdk.streams import Stream class {{ cookiecutter.source_name }}Stream(Stream): """Stream class for {{ cookiecutter.source_name }} streams.""" - def get_records(self, context: dict | None) -> Iterable[dict]: + def get_records( + self, + context: dict | None, # noqa: ARG002 + ) -> Iterable[dict]: """Return a generator of record-type dictionary objects. The optional `context` argument is used to identify a specific slice of the @@ -24,7 +27,8 @@ class {{ cookiecutter.source_name }}Stream(Stream): NotImplementedError: If the implementation is TODO """ # TODO: Write logic to extract data from the upstream source. - # records = mysource.getall() + # records = mysource.getall() # noqa: ERA001 # for record in records: - # yield record.to_dict() - raise NotImplementedError("The method is not yet implemented (TODO)") + # yield record.to_dict() # noqa: ERA001 + errmsg = "The method is not yet implemented (TODO)" + raise NotImplementedError(errmsg) diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'REST' == cookiecutter.stream_type %}client.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'REST' == cookiecutter.stream_type %}client.py{%endif%} index e3431e7ca..4b65132b3 100644 --- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'REST' == cookiecutter.stream_type %}client.py{%endif%} +++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'REST' == cookiecutter.stream_type %}client.py{%endif%} @@ -51,17 +51,16 @@ SCHEMAS_DIR = Path(__file__).parent / Path("./schemas") class {{ cookiecutter.source_name }}Stream({{ cookiecutter.stream_type }}Stream): """{{ cookiecutter.source_name }} stream class.""" - # TODO: Set the API's base URL here: - url_base = "https://api.mysample.com" - - # OR use a dynamic url_base: - # @property - # def url_base(self) -> str: - # """Return the API URL root, configurable via tap settings.""" - # return self.config["api_url"] + @property + def url_base(self) -> str: + """Return the API URL root, configurable via tap settings.""" + # TODO: hardcode a value here, or retrieve it from self.config + return "https://api.mysample.com" records_jsonpath = "$[*]" # Or override `parse_response`. - next_page_token_jsonpath = "$.next_page" # Or override `get_next_page_token`. + + # Set this value or override `get_next_page_token`. 
+ next_page_token_jsonpath = "$.next_page" # noqa: S105 {%- if cookiecutter.auth_method in ("OAuth2", "JWT") %} @@ -133,14 +132,14 @@ class {{ cookiecutter.source_name }}Stream({{ cookiecutter.stream_type }}Stream) headers["User-Agent"] = self.config.get("user_agent") {%- if cookiecutter.auth_method not in ("OAuth2", "JWT") %} # If not using an authenticator, you may also provide inline auth headers: - # headers["Private-Token"] = self.config.get("auth_token") + # headers["Private-Token"] = self.config.get("auth_token") # noqa: ERA001 {%- endif %} return headers def get_next_page_token( self, response: requests.Response, - previous_token: Any | None, + previous_token: Any | None, # noqa: ARG002 ) -> Any | None: """Return a token for identifying next page or None if no more pages. @@ -156,7 +155,8 @@ class {{ cookiecutter.source_name }}Stream({{ cookiecutter.stream_type }}Stream) # pagination loop. if self.next_page_token_jsonpath: all_matches = extract_jsonpath( - self.next_page_token_jsonpath, response.json() + self.next_page_token_jsonpath, + response.json(), ) first_match = next(iter(all_matches), None) next_page_token = first_match @@ -167,7 +167,7 @@ class {{ cookiecutter.source_name }}Stream({{ cookiecutter.stream_type }}Stream) def get_url_params( self, - context: dict | None, + context: dict | None, # noqa: ARG002 next_page_token: Any | None, ) -> dict[str, Any]: """Return a dictionary of values to be used in URL parameterization. @@ -189,8 +189,8 @@ class {{ cookiecutter.source_name }}Stream({{ cookiecutter.stream_type }}Stream) def prepare_request_payload( self, - context: dict | None, - next_page_token: Any | None, + context: dict | None, # noqa: ARG002 + next_page_token: Any | None, # noqa: ARG002 ) -> dict | None: """Prepare the data payload for the REST API request. @@ -218,7 +218,11 @@ class {{ cookiecutter.source_name }}Stream({{ cookiecutter.stream_type }}Stream) # TODO: Parse response body and return a set of records. yield from extract_jsonpath(self.records_jsonpath, input=response.json()) - def post_process(self, row: dict, context: dict | None = None) -> dict | None: + def post_process( + self, + row: dict, + context: dict | None = None, # noqa: ARG002 + ) -> dict | None: """As needed, append or transform raw data to match expected structure. 
Args: diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' != cookiecutter.stream_type %}streams.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' != cookiecutter.stream_type %}streams.py{%endif%} index 5ab485498..420017950 100644 --- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' != cookiecutter.stream_type %}streams.py{%endif%} +++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' != cookiecutter.stream_type %}streams.py{%endif%} @@ -22,7 +22,7 @@ class UsersStream({{ cookiecutter.source_name }}Stream): name = "users" # Optionally, you may also use `schema_filepath` in place of `schema`: - # schema_filepath = SCHEMAS_DIR / "users.json" + # schema_filepath = SCHEMAS_DIR / "users.json" # noqa: ERA001 schema = th.PropertiesList( th.Property("name", th.StringType), th.Property( @@ -107,7 +107,7 @@ class UsersStream({{ cookiecutter.source_name }}Stream): primary_keys = ["id"] replication_key = None # Optionally, you may also use `schema_filepath` in place of `schema`: - # schema_filepath = SCHEMAS_DIR / "users.json" + # schema_filepath = SCHEMAS_DIR / "users.json" # noqa: ERA001 schema = th.PropertiesList( th.Property("name", th.StringType), th.Property( diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' == cookiecutter.stream_type %}client.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' == cookiecutter.stream_type %}client.py{%endif%} index 4d050297d..a34cee4d0 100644 --- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' == cookiecutter.stream_type %}client.py{%endif%} +++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' == cookiecutter.stream_type %}client.py{%endif%} @@ -4,17 +4,17 @@ This includes {{ cookiecutter.source_name }}Stream and {{ cookiecutter.source_na """ from __future__ import annotations -from typing import Any, Iterable -import sqlalchemy +from typing import Any, Iterable +import sqlalchemy # noqa: TCH002 from singer_sdk import SQLConnector, SQLStream class {{ cookiecutter.source_name }}Connector(SQLConnector): """Connects to the {{ cookiecutter.source_name }} SQL source.""" - def get_sqlalchemy_url(cls, config: dict) -> str: + def get_sqlalchemy_url(self, config: dict) -> str: """Concatenate a SQLAlchemy URL for use in connecting to the source. Args: diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if cookiecutter.auth_method in ('OAuth2', 'JWT')%}auth.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if cookiecutter.auth_method in ('OAuth2', 'JWT')%}auth.py{%endif%} index 5258a5f1f..762cca155 100644 --- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if cookiecutter.auth_method in ('OAuth2', 'JWT')%}auth.py{%endif%} +++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if cookiecutter.auth_method in ('OAuth2', 'JWT')%}auth.py{%endif%} @@ -25,16 +25,16 @@ class {{ cookiecutter.source_name }}Authenticator(OAuthAuthenticator, metaclass= """ # TODO: Define the request body needed for the API. 
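The default body in the hunk below implements an OAuth2 password grant against a sample endpoint. For an API that uses the client_credentials grant instead, a hypothetical alternative body might look like the following sketch (the client_secret setting is an assumption, not part of the template):

    return {
        "grant_type": "client_credentials",
        "scope": self.oauth_scopes,
        "client_id": self.config["client_id"],
        "client_secret": self.config["client_secret"],  # assumed config setting
    }
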
return { - 'resource': 'https://analysis.windows.net/powerbi/api', - 'scope': self.oauth_scopes, - 'client_id': self.config["client_id"], - 'username': self.config["username"], - 'password': self.config["password"], - 'grant_type': 'password', + "resource": "https://analysis.windows.net/powerbi/api", + "scope": self.oauth_scopes, + "client_id": self.config["client_id"], + "username": self.config["username"], + "password": self.config["password"], + "grant_type": "password", } @classmethod - def create_for_stream(cls, stream) -> "{{ cookiecutter.source_name }}Authenticator": + def create_for_stream(cls, stream) -> {{ cookiecutter.source_name }}Authenticator: # noqa: ANN001 """Instantiate an authenticator for a specific Singer stream. Args: @@ -57,7 +57,10 @@ class {{ cookiecutter.source_name }}Authenticator(OAuthJWTAuthenticator): """Authenticator class for {{ cookiecutter.source_name }}.""" @classmethod - def create_for_stream(cls, stream) -> "{{ cookiecutter.source_name }}Authenticator": + def create_for_stream( + cls, + stream, # noqa: ANN001 + ) -> {{ cookiecutter.source_name }}Authenticator: """Instantiate an authenticator for a specific Singer stream. Args: diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml b/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml new file mode 100644 index 000000000..1cf768fd1 --- /dev/null +++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml @@ -0,0 +1,36 @@ +ci: + autofix_prs: true + autoupdate_schedule: weekly + autoupdate_commit_msg: 'chore: pre-commit autoupdate' + +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-json + - id: check-toml + - id: check-yaml + - id: end-of-file-fixer + - id: trailing-whitespace + +- repo: https://github.com/charliermarsh/ruff-pre-commit + rev: v0.0.263 + hooks: + - id: ruff + args: [--fix, --exit-non-zero-on-fix] + +- repo: https://github.com/psf/black + rev: 23.3.0 + hooks: + - id: black + +- repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.2.0 + hooks: + - id: mypy + additional_dependencies: + {%- if cookiecutter.serialization_method == "SQL" %} + - sqlalchemy-stubs + {%- else %} + - types-requests + {%- endif %} diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml b/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml index f0037615e..90dcdddab 100644 --- a/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml +++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml @@ -29,24 +29,28 @@ requests = "^2.28.2" [tool.poetry.dev-dependencies] pytest = "^7.2.1" -flake8 = "^5.0.4" -darglint = "^1.8.1" -black = "^23.1.0" -pyupgrade = "^3.3.1" -mypy = "^1.0.0" -isort = "^5.11.5" singer-sdk = { version="^0.26.0", extras = ["testing"] } -{%- if cookiecutter.serialization_method != "SQL" %} -types-requests = "^2.28.11.12" -{%- endif %} [tool.poetry.extras] s3 = ["fs-s3fs"] -[tool.isort] -profile = "black" -multi_line_output = 3 # Vertical Hanging Indent -src_paths = "{{cookiecutter.library_name}}" +[tool.ruff] +ignore = [ + "ANN101", # missing-type-self + "ANN102", # missing-type-cls +] +select = ["ALL"] +src = ["{{cookiecutter.library_name}}"] +target-version = "py37" + +[tool.ruff.flake8-annotations] +allow-star-arg-any = true + +[tool.ruff.isort] +known-first-party = ["{{cookiecutter.library_name}}"] + +[tool.ruff.pydocstyle] +convention = "google" [build-system] requires = 
["poetry-core>=1.0.8"] diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/tests/{{ 'test' }}_core.py b/cookiecutter/target-template/{{cookiecutter.target_id}}/tests/{{ 'test' }}_core.py index e2cffabed..2403b2a3a 100644 --- a/cookiecutter/target-template/{{cookiecutter.target_id}}/tests/{{ 'test' }}_core.py +++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/tests/{{ 'test' }}_core.py @@ -1,30 +1,30 @@ """Tests standard target features using the built-in SDK tests library.""" -import pytest -from typing import Dict, Any +from __future__ import annotations + +import typing as t +import pytest from singer_sdk.testing import get_target_test_class from {{ cookiecutter.library_name }}.target import Target{{ cookiecutter.destination_name }} - -SAMPLE_CONFIG: Dict[str, Any] = { - # TODO: Initialize minimal target config -} +# TODO: Initialize minimal target config +SAMPLE_CONFIG: dict[str, t.Any] = {} # Run standard built-in target tests from the SDK: StandardTargetTests = get_target_test_class( target_class=Target{{ cookiecutter.destination_name }}, - config=SAMPLE_CONFIG + config=SAMPLE_CONFIG, ) -class TestTarget{{ cookiecutter.destination_name }}(StandardTargetTests): +class TestTarget{{ cookiecutter.destination_name }}(StandardTargetTests): # type: ignore[misc, valid-type] # noqa: E501 """Standard Target Tests.""" @pytest.fixture(scope="class") - def resource(self): + def resource(self): # noqa: ANN201 """Generic external resource. This fixture is useful for setup and teardown of external resources, @@ -33,7 +33,7 @@ def resource(self): Example usage can be found in the SDK samples test suite: https://github.com/meltano/sdk/tree/main/tests/samples """ - yield "resource" + return "resource" # TODO: Create additional tests as appropriate for your target. diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/tox.ini b/cookiecutter/target-template/{{cookiecutter.target_id}}/tox.ini index 9b81ad325..70b9e4ac7 100644 --- a/cookiecutter/target-template/{{cookiecutter.target_id}}/tox.ini +++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/tox.ini @@ -1,52 +1,19 @@ # This file can be used to customize tox tests as well as other test frameworks like flake8 and mypy [tox] -envlist = py38 -; envlist = py37, py38, py39 +envlist = py37, py38, py39, py310, py311 isolated_build = true [testenv] allowlist_externals = poetry - commands = poetry install -v poetry run pytest - poetry run black --check {{cookiecutter.library_name}}/ - poetry run flake8 {{cookiecutter.library_name}} - poetry run mypy . --exclude='tests' [testenv:pytest] # Run the python tests. # To execute, run `tox -e pytest` -envlist = py37, py38, py39 +envlist = py37, py38, py39, py310, py311 commands = poetry install -v poetry run pytest - -[testenv:format] -# Attempt to auto-resolve lint errors before they are raised. -# To execute, run `tox -e format` -commands = - poetry install -v - poetry run black {{cookiecutter.library_name}}/ - poetry run isort {{cookiecutter.library_name}} - -[testenv:lint] -# Raise an error if lint and style standards are not met. 
-# To execute, run `tox -e lint` -commands = - poetry install -v - poetry run black --check --diff {{cookiecutter.library_name}}/ - poetry run isort --check {{cookiecutter.library_name}} - poetry run flake8 {{cookiecutter.library_name}} - # refer to mypy.ini for specific settings - poetry run mypy {{cookiecutter.library_name}} --exclude='{{cookiecutter.library_name}}/tests' - -[flake8] -docstring-convention = google -ignore = W503 -max-line-length = 88 -max-complexity = 10 - -[pydocstyle] -ignore = D105,D203,D213 diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/__init__.py b/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/__init__.py index e69de29bb..a5f25ee1b 100644 --- a/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/__init__.py +++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/__init__.py @@ -0,0 +1 @@ +"""Target for {{ cookiecutter.destination_name }}.""" diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/sinks.py b/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/sinks.py index 34146a97a..4e84d1284 100644 --- a/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/sinks.py +++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/sinks.py @@ -12,14 +12,19 @@ {%- set sinkclass = sinkclass_mapping[cookiecutter.serialization_method] %} -from singer_sdk.sinks import {{ sinkclass }} - {%- if sinkclass == "SQLSink" %} + from singer_sdk.connectors import SQLConnector -{% endif %} +from singer_sdk.sinks import {{ sinkclass }} +{%- else %} + +from singer_sdk.sinks import {{ sinkclass }} +{%- endif %} + {%- if sinkclass == "SQLSink" %} + class {{ cookiecutter.destination_name }}Connector(SQLConnector): """The connector for {{ cookiecutter.destination_name }}. @@ -55,7 +60,7 @@ def process_record(self, record: dict, context: dict) -> None: """ # Sample: # ------ - # client.write(record) + # client.write(record) # noqa: ERA001 {%- elif sinkclass == "BatchSink" -%} diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/target.py b/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/target.py index ac4299abb..f28bb4e94 100644 --- a/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/target.py +++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/target.py @@ -4,8 +4,8 @@ {%- set target_class = "SQLTarget" if cookiecutter.serialization_method == "SQL" else "Target" %} -from singer_sdk.target_base import {{ target_class }} from singer_sdk import typing as th +from singer_sdk.target_base import {{ target_class }} from {{ cookiecutter.library_name }}.sinks import ( {{ cookiecutter.destination_name }}Sink, @@ -29,18 +29,18 @@ class Target{{ cookiecutter.destination_name }}({{ target_class }}): th.Property( "filepath", th.StringType, - description="The path to the target output file" + description="The path to the target output file", ), th.Property( "file_naming_scheme", th.StringType, - description="The scheme with which output files will be named" + description="The scheme with which output files will be named", ), th.Property( "auth_token", th.StringType, secret=True, # Flag config as protected. 
- description="The path to the target output file" + description="The path to the target output file", ), {%- endif %} ).to_dict() diff --git a/e2e-tests/cookiecutters/target-sql.json b/e2e-tests/cookiecutters/target-sql.json new file mode 100644 index 000000000..50a5c13c9 --- /dev/null +++ b/e2e-tests/cookiecutters/target-sql.json @@ -0,0 +1,12 @@ +{ + "cookiecutter": { + "destination_name": "MyDestinationName", + "admin_name": "FirstName LastName", + "target_id": "target-sql", + "library_name": "target_sql", + "variant": "None (Skip)", + "serialization_method": "SQL", + "_template": "./sdk/cookiecutter/target-template", + "_output_dir": "." + } + } diff --git a/noxfile.py b/noxfile.py index 45c0c5974..d04221535 100644 --- a/noxfile.py +++ b/noxfile.py @@ -182,8 +182,6 @@ def test_cookiecutter(session: Session, replay_file_path) -> None: Runs the lint task on the created test project. """ - args = session.posargs or ["1"] - cc_build_path = tempfile.gettempdir() folder_base_path = "./cookiecutter" @@ -232,14 +230,6 @@ def test_cookiecutter(session: Session, replay_file_path) -> None: session.run("poetry", "lock", external=True) session.run("poetry", "install", external=True) - for path in glob.glob(f"{Path.cwd()}/*", recursive=True): - if Path(path).name.startswith("tap") or Path( - path, - ).name.startswith("target"): - library_name = Path(path).name - - for argument in ["black", "isort", "flake8", "mypy"]: - session.run("poetry", "run", argument, library_name, external=True) - - if int(args[0]) == 1: - session.run("poetry", "run", "tox", "-e", "lint", external=True) + session.run("git", "init", external=True) + session.run("git", "add", ".", external=True) + session.run("pre-commit", "run", "--all-files", external=True) diff --git a/tests/cookiecutters/__init__.py b/tests/cookiecutters/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/cookiecutters/test_cookiecutter.py b/tests/cookiecutters/test_cookiecutter.py deleted file mode 100644 index 8c44802ae..000000000 --- a/tests/cookiecutters/test_cookiecutter.py +++ /dev/null @@ -1,81 +0,0 @@ -"""Test cookiecutter template.""" - -from __future__ import annotations - -import logging -from logging import getLogger -from pathlib import Path - -import black -import yaml -from cookiecutter.main import cookiecutter -from flake8.api import legacy as flake8 -from mypy import api - -getLogger("flake8").propagate = False - - -def pytest_generate_tests(metafunc): - """Generate test cases for each Cookiecutter template.""" - id_list = [] - argvalues = [] - - for template in ["tap", "target"]: - template_dir = Path(f"cookiecutter/{template}-template") - case_key = f"{template}_id" - test_input_file = template_dir.joinpath("cookiecutter.tests.yml") - - for case in yaml.safe_load(test_input_file.read_text())["tests"]: - id_list.append(case[case_key]) - argvalues.append([template_dir, case]) - - metafunc.parametrize( - ["cookiecutter_dir", "cookiecutter_input"], - argvalues, - ids=id_list, - scope="function", - ) - - -def test_cookiecutter(outdir: str, cookiecutter_dir: Path, cookiecutter_input: dict): - """Generate and validate project from Cookiecutter.""" - style_guide_easy = flake8.get_style_guide( - ignore=["E302", "E303", "E305", "F401", "W391"], - ) - style_guide_strict = flake8.get_style_guide( - ignore=[ - "F401", # "imported but unused" - "W292", # "no newline at end of file" - "W391", # "blank line at end of file" - ], - ) - cookiecutter( - template=str(cookiecutter_dir), - output_dir=outdir, - 
extra_context=cookiecutter_input, - overwrite_if_exists=True, - no_input=True, - ) - for outfile in Path(outdir).glob("**/*.py"): - filepath = str(outfile.absolute()) - report = style_guide_easy.check_files([filepath]) - errors = report.get_statistics("E") - assert ( - not errors - ), f"Flake8 found violations in first pass of {filepath}: {errors}" - mypy_out = api.run([filepath, "--config", str(Path(outdir) / Path("tox.ini"))]) - mypy_msg = str(mypy_out[0]) - if not mypy_msg.startswith("Success:"): - logging.exception("MyPy validation failed: %s", mypy_msg) - assert not mypy_msg, f"MyPy validation failed for file {filepath}" - report = style_guide_strict.check_files([filepath]) - errors = report.get_statistics("E") - assert ( - not errors - ), f"Flake8 found violations in second pass of {filepath}: {errors}" - black.format_file_in_place( - Path(filepath), - fast=False, - mode=black.FileMode(), - write_back=black.WriteBack.NO, - )
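With the flake8- and mypy-based test above removed, linting of generated projects is now exercised through pre-commit from the updated noxfile.py session. A rough local equivalent, written as a pytest sketch (the template path, default context, and use of the tmp_path fixture are illustrative assumptions, not part of this patch):

    import subprocess
    from pathlib import Path

    from cookiecutter.main import cookiecutter


    def test_generated_project_passes_pre_commit(tmp_path: Path) -> None:
        """Generate a project and lint it with pre-commit, mirroring the nox session."""
        cookiecutter(
            template="cookiecutter/tap-template",  # assumed relative path
            output_dir=str(tmp_path),
            no_input=True,  # accept the template's default answers
            overwrite_if_exists=True,
        )
        project_dir = next(tmp_path.iterdir())  # the single generated project
        subprocess.run(["git", "init"], cwd=project_dir, check=True)
        subprocess.run(["git", "add", "."], cwd=project_dir, check=True)
        subprocess.run(["pre-commit", "run", "--all-files"], cwd=project_dir, check=True)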