From 0bbe4cb66f0b7ccc3d107678d0ca80b9027e98db Mon Sep 17 00:00:00 2001 From: "Taylor A. Murphy" Date: Fri, 14 Oct 2022 12:15:53 -0500 Subject: [PATCH 01/12] chore: Update logo (#1072) --- docs/_static/css/custom.css | 5 ----- docs/_static/img/logo.svg | 7 ++++++- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/_static/css/custom.css b/docs/_static/css/custom.css index 39d264efc..0a0427132 100644 --- a/docs/_static/css/custom.css +++ b/docs/_static/css/custom.css @@ -48,11 +48,6 @@ a:hover { top: 1px; } -.wy-side-nav-search > a::after { - display: inline-block; - content: "| SDK"; -} - .wy-side-nav-search > div.version { color: hsla(0, 0%, 100%, 0.7); } diff --git a/docs/_static/img/logo.svg b/docs/_static/img/logo.svg index 62c65875b..4a6a37dcf 100644 --- a/docs/_static/img/logo.svg +++ b/docs/_static/img/logo.svg @@ -1 +1,6 @@ - \ No newline at end of file + + + + + + From b9f693b5678305c115b9849dc2f3c8c682b38d3d Mon Sep 17 00:00:00 2001 From: "Edgar R. M" Date: Fri, 14 Oct 2022 20:16:29 -0500 Subject: [PATCH 02/12] docs: Document an example implementation and usage of `BaseHATEOASPaginator` (#1074) * docs: Document an example implementation and usage of `BaseHATEOASPaginator` * Fix type --- singer_sdk/pagination.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/singer_sdk/pagination.py b/singer_sdk/pagination.py index 8e294c3a5..4af95557f 100644 --- a/singer_sdk/pagination.py +++ b/singer_sdk/pagination.py @@ -180,6 +180,37 @@ class BaseHATEOASPaginator(BaseAPIPaginator[Optional[ParseResult]], metaclass=AB This paginator expects responses to have a key "next" with a value like "https://api.com/link/to/next-item". + + The :attr:`~singer_sdk.pagination.BaseAPIPaginator.current_value` attribute of + this paginator is a `urllib.parse.ParseResult`_ object. This object + contains the following attributes: + + - scheme + - netloc + - path + - params + - query + - fragment + + That means you can access and parse the query params in your stream like this: + + .. code-block:: python + + class MyHATEOASPaginator(BaseHATEOASPaginator): + def get_next_url(self, response): + return response.json().get("next") + + class MyStream(Stream): + def get_new_paginator(self): + return MyHATEOASPaginator() + + def get_url_params(self, next_page_token) -> dict: + if next_page_token: + return dict(parse_qsl(next_page_token.query)) + return {} + + .. _`urllib.parse.ParseResult`: + https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlparse """ def __init__(self, *args: Any, **kwargs: Any) -> None: From c859845997d5e22287f555a6e3d66eaabca69a7f Mon Sep 17 00:00:00 2001 From: "Edgar R. M" Date: Fri, 14 Oct 2022 20:35:22 -0500 Subject: [PATCH 03/12] docs: Add explanation and recommendations for context usage (#1060) --- docs/context_object.md | 18 ++++++++++++++++++ docs/index.rst | 1 + docs/partitioning.md | 25 ++++++++++++++----------- 3 files changed, 33 insertions(+), 11 deletions(-) create mode 100644 docs/context_object.md diff --git a/docs/context_object.md b/docs/context_object.md new file mode 100644 index 000000000..09e5ed469 --- /dev/null +++ b/docs/context_object.md @@ -0,0 +1,18 @@ +# The Context Object + +Many of the methods in the [Stream](classes/singer_sdk.Stream) class and its subclasses accept +a `context` parameter, which is a dictionary that contains information about the stream +partition or parent stream. 
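
For example, a stream that receives the context in `get_records` might look roughly like the sketch below. This is a minimal illustration only; the stream name, schema, and the `region` key are assumptions for the example, not part of the SDK.

```python
from typing import Iterable, Optional

from singer_sdk import Stream
from singer_sdk import typing as th


class OrdersStream(Stream):
    """Hypothetical stream showing how the ``context`` argument is received."""

    name = "orders"
    schema = th.PropertiesList(
        th.Property("id", th.IntegerType),
        th.Property("region", th.StringType),
    ).to_dict()

    def get_records(self, context: Optional[dict]) -> Iterable[dict]:
        # ``context`` is None unless a partition or a parent stream supplies one,
        # e.g. {"region": "us-east"} for a regionally partitioned stream.
        region = (context or {}).get("region")
        yield {"id": 1, "region": region}
```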
+ +## Best practices for using context + +- The context object MUST NOT contain any sensitive information, such as API keys or secrets. + This is because the context is

  1) sent to the target,
  2) stored in the state file, and
  3) logged to the console as a tag in metrics and logs.

+ +- The context object SHOULD NOT be mutated during the stream's lifecycle. This is because the + context is stored in the state file, and mutating it will cause the state file to be + inconsistent with the actual state of the stream. diff --git a/docs/index.rst b/docs/index.rst index 599d54f4b..5f3d0d448 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -63,6 +63,7 @@ Advanced Topics parent_streams partitioning + context_object stream_maps batch porting diff --git a/docs/partitioning.md b/docs/partitioning.md index 37a2f99e2..95103dc45 100644 --- a/docs/partitioning.md +++ b/docs/partitioning.md @@ -5,20 +5,23 @@ which each have their own state and their own distinct queryable domain. ## If you do not require partitioning -In general, developers can simply ignore the `context` arguments in methods like -`Stream.get_records()` if partitioning is not required. +In general, developers can simply ignore the [`context`](./context_object.md) arguments +in methods like [`Stream.get_records()`](singer_sdk.Stream.get_records) if partitioning +is not required. ## If you do want to utilize partitioning -To take advantage of partitioning, first override the `Stream.partitions` property, -returning a list of dictionaries, where each dictionary uniquely defines the construct of -a partition. For instance, a regionally partitioned stream may return the following: +To take advantage of partitioning, first override the +[`Stream.partitions`](singer_sdk.Stream.partitions) property, returning a list of +dictionaries, where each dictionary uniquely defines the construct of a partition. +For instance, a regionally partitioned stream may return the following: `[{"region": "us-east"}, {"region": "us-west"}, ...]` -For any streams which define the `partitions` property, the individual partitions will be -passed one at a time through the `partition` argument of methods which reference the -partition, such as `Stream.get_records()`. +For any streams which define the [`partitions`](singer_sdk.Stream.partitions) property, +the individual partitions will be passed one at a time through the `context` argument +of methods which reference the partition, such as +[`Stream.get_records()`](singer_sdk.Stream.get_records). ## If you are unsure if partitioning will be needed @@ -28,9 +31,9 @@ work regardless of whether partition is an actual partition context or `None`, m no partition is specified. When dealing with state, for example, developers may always call -`Stream.get_context_state(context)` even if `context` is not set. -The method will automatically return the state that is appropriate, either for the partition -or for the stream. +[`Stream.get_context_state(context)`](singer_sdk.Stream.get_context_state) even if +`context` is not set. The method will automatically return the state that is appropriate, +either for the partition or for the stream. 
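
As a minimal sketch (the stream name, schema, and record values below are illustrative assumptions, not SDK requirements), a partition-agnostic implementation might look like this:

```python
from typing import Any, Dict, Iterable, Optional

from singer_sdk import Stream
from singer_sdk import typing as th


class RegionalOrdersStream(Stream):
    """Hypothetical stream whose implementation works with or without partitions."""

    name = "regional_orders"
    replication_key = "updated_at"
    schema = th.PropertiesList(
        th.Property("id", th.IntegerType),
        th.Property("updated_at", th.DateTimeType),
    ).to_dict()

    def get_records(self, context: Optional[dict]) -> Iterable[Dict[str, Any]]:
        # Safe whether ``context`` is a partition dict or None: the SDK returns
        # the partition-level state or the stream-level state as appropriate.
        state = self.get_context_state(context)
        self.logger.debug("Resuming with state: %s", state)
        starting_value = self.get_starting_replication_key_value(context)
        # ... fetch and yield only records newer than ``starting_value`` ...
        yield {"id": 1, "updated_at": "2022-10-17T00:00:00+00:00"}
```

The same method body runs unchanged whether or not the stream defines `partitions`.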
## Additional State Partitioning References From 3998cfe2e77bafe262889ee5635878db3590c49d Mon Sep 17 00:00:00 2001 From: MeltyBot <105875157+MeltyBot@users.noreply.github.com> Date: Sun, 16 Oct 2022 23:05:20 -0700 Subject: [PATCH 04/12] chore: Release v0.12.0 (#1075) --- .github/ISSUE_TEMPLATE/bug.yml | 2 +- CHANGELOG.md | 20 +++++++++++++++++++ .../{{cookiecutter.tap_id}}/pyproject.toml | 2 +- .../{{cookiecutter.target_id}}/pyproject.toml | 2 +- docs/conf.py | 2 +- pyproject.toml | 4 ++-- 6 files changed, 26 insertions(+), 6 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml index 8c6b9c816..aa611ba65 100644 --- a/.github/ISSUE_TEMPLATE/bug.yml +++ b/.github/ISSUE_TEMPLATE/bug.yml @@ -15,7 +15,7 @@ body: attributes: label: Singer SDK Version description: Version of the library you are using - placeholder: "0.11.1" + placeholder: "0.12.0" validations: required: true - type: dropdown diff --git a/CHANGELOG.md b/CHANGELOG.md index 3920d0733..636063672 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,26 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## v0.12.0 (2022-10-17) + +### ✨ New + +- [#1032](https://github.com/meltano/sdk/issues/1032) Support stream property selection push-down in SQL streams +- [#978](https://github.com/meltano/sdk/issues/978) Allow configuring a dedicated metrics logger + +### 🐛 Fixes + +- [#1043](https://github.com/meltano/sdk/issues/1043) Batch storage `split_url` to work with Windows paths -- _**Thanks @BuzzCutNorman!**_ +- [#826](https://github.com/meltano/sdk/issues/826) Remove Poetry version pin for GitHub Actions -- _**Thanks @visch!**_ +- [#1001](https://github.com/meltano/sdk/issues/1001) Use column name in `allow_column_alter` error message + +### 📚 Documentation Improvements + +- [#1060](https://github.com/meltano/sdk/issues/1060) Add explanation and recommendations for context usage +- [#1074](https://github.com/meltano/sdk/issues/1074) Document an example implementation and usage of `BaseHATEOASPaginator` +- [#1020](https://github.com/meltano/sdk/issues/1020) Fixed typo in `docs/stream_maps.md` -- _**Thanks @spacecowboy!**_ +- [#1006](https://github.com/meltano/sdk/issues/1006) Add links to Meltano install/tut + ## v0.11.1 (2022-09-27) ### 🐛 Fixes diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml index 992484bcd..a6cbd9187 100644 --- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml +++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml @@ -12,7 +12,7 @@ license = "Apache 2.0" [tool.poetry.dependencies] python = "<3.11,>=3.7.1" requests = "^2.25.1" -singer-sdk = "^0.11.1" +singer-sdk = "^0.12.0" [tool.poetry.dev-dependencies] pytest = "^6.2.5" diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml b/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml index d8cc4982c..d7d0c0f19 100644 --- a/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml +++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml @@ -12,7 +12,7 @@ license = "Apache 2.0" [tool.poetry.dependencies] python = "<3.11,>=3.7.1" requests = "^2.25.1" -singer-sdk = "^0.11.1" +singer-sdk = "^0.12.0" [tool.poetry.dev-dependencies] pytest = "^6.2.5" diff --git a/docs/conf.py b/docs/conf.py index 878f31544..37081c2b8 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -25,7 +25,7 @@ author = "Meltano Core Team 
and Contributors" # The full version, including alpha/beta/rc tags -release = "0.11.1" +release = "0.12.0" # -- General configuration --------------------------------------------------- diff --git a/pyproject.toml b/pyproject.toml index 5eb1166f3..a9dae0e53 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "singer-sdk" -version = "0.11.1" +version = "0.12.0" description = "A framework for building Singer taps" authors = ["Meltano Team and Contributors"] maintainers = ["Meltano Team and Contributors"] @@ -121,7 +121,7 @@ markers = [ [tool.commitizen] name = "cz_version_bump" -version = "0.11.1" +version = "0.12.0" tag_format = "v$major.$minor.$patch$prerelease" version_files = [ "docs/conf.py", From 947cd55fa49fce5d4e980807b5caab605d76f9fe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Oct 2022 10:25:47 -0500 Subject: [PATCH 05/12] chore(deps): Bump sphinx from 5.2.3 to 5.3.0 (#1079) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index d58901120..8920d32ed 100644 --- a/poetry.lock +++ b/poetry.lock @@ -973,8 +973,8 @@ optional = false python-versions = "*" [[package]] -name = "Sphinx" -version = "5.2.3" +name = "sphinx" +version = "5.3.0" description = "Python documentation generator" category = "main" optional = true @@ -2124,9 +2124,9 @@ snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] -Sphinx = [ - {file = "Sphinx-5.2.3.tar.gz", hash = "sha256:5b10cb1022dac8c035f75767799c39217a05fc0fe2d6fe5597560d38e44f0363"}, - {file = "sphinx-5.2.3-py3-none-any.whl", hash = "sha256:7abf6fabd7b58d0727b7317d5e2650ef68765bbe0ccb63c8795fa8683477eaa2"}, +sphinx = [ + {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, + {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, ] sphinx-autobuild = [ {file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"}, From aa79ed3d01a2453561fd643c2b2d655a722fb7bd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Oct 2022 15:33:07 +0000 Subject: [PATCH 06/12] chore(deps): Bump pip from 22.2.2 to 22.3 in /.github/workflows (#1076) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/constraints.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/constraints.txt b/.github/workflows/constraints.txt index b9f243fbe..ee0f8b694 100644 --- a/.github/workflows/constraints.txt +++ b/.github/workflows/constraints.txt @@ -1,4 +1,4 @@ -pip==22.2.2 +pip==22.3 poetry==1.2.2 virtualenv==20.16.5 nox==2022.8.7 From a2e93da372169e49c0e202e1a9c98e2a83adfeaa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Oct 2022 11:02:39 -0500 Subject: [PATCH 07/12] chore(deps-dev): Bump types-python-dateutil from 2.8.19 to 2.8.19.2 (#1078) Co-authored-by: dependabot[bot] 
<49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Edgar R. M --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8920d32ed..494cafb42 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1199,7 +1199,7 @@ python-versions = ">=3.6" [[package]] name = "types-python-dateutil" -version = "2.8.19" +version = "2.8.19.2" description = "Typing stubs for python-dateutil" category = "dev" optional = false @@ -2259,8 +2259,8 @@ typed-ast = [ {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, ] types-python-dateutil = [ - {file = "types-python-dateutil-2.8.19.tar.gz", hash = "sha256:bfd3eb39c7253aea4ba23b10f69b017d30b013662bb4be4ab48b20bbd763f309"}, - {file = "types_python_dateutil-2.8.19-py3-none-any.whl", hash = "sha256:6284df1e4783d8fc6e587f0317a81333856b872a6669a282f8a325342bce7fa8"}, + {file = "types-python-dateutil-2.8.19.2.tar.gz", hash = "sha256:e6e32ce18f37765b08c46622287bc8d8136dc0c562d9ad5b8fd158c59963d7a7"}, + {file = "types_python_dateutil-2.8.19.2-py3-none-any.whl", hash = "sha256:3f4dbe465e7e0c6581db11fd7a4855d1355b78712b3f292bd399cd332247e9c0"}, ] types-pytz = [ {file = "types-pytz-2022.4.0.0.tar.gz", hash = "sha256:17d66e4b16e80ceae0787726f3a22288df7d3f9fdebeb091dc64b92c0e4ea09d"}, From bc3ff0825446ddc9e63ebff2c4ac5ecf7c046aa2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Oct 2022 11:33:06 -0500 Subject: [PATCH 08/12] chore(deps): Bump sqlalchemy from 1.4.41 to 1.4.42 (#1077) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Edgar R. M --- poetry.lock | 88 ++++++++++++++++++++++++++--------------------------- 1 file changed, 44 insertions(+), 44 deletions(-) diff --git a/poetry.lock b/poetry.lock index 494cafb42..3beac0522 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1122,8 +1122,8 @@ lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] -name = "SQLAlchemy" -version = "1.4.41" +name = "sqlalchemy" +version = "1.4.42" description = "Database Abstraction Library" category = "main" optional = false @@ -2164,48 +2164,48 @@ sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] -SQLAlchemy = [ - {file = "SQLAlchemy-1.4.41-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:13e397a9371ecd25573a7b90bd037db604331cf403f5318038c46ee44908c44d"}, - {file = "SQLAlchemy-1.4.41-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2d6495f84c4fd11584f34e62f9feec81bf373787b3942270487074e35cbe5330"}, - {file = "SQLAlchemy-1.4.41-cp27-cp27m-win32.whl", hash = "sha256:e570cfc40a29d6ad46c9aeaddbdcee687880940a3a327f2c668dd0e4ef0a441d"}, - {file = "SQLAlchemy-1.4.41-cp27-cp27m-win_amd64.whl", hash = "sha256:5facb7fd6fa8a7353bbe88b95695e555338fb038ad19ceb29c82d94f62775a05"}, - {file = "SQLAlchemy-1.4.41-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f37fa70d95658763254941ddd30ecb23fc4ec0c5a788a7c21034fc2305dab7cc"}, - {file = "SQLAlchemy-1.4.41-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:361f6b5e3f659e3c56ea3518cf85fbdae1b9e788ade0219a67eeaaea8a4e4d2a"}, - {file = 
"SQLAlchemy-1.4.41-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0990932f7cca97fece8017414f57fdd80db506a045869d7ddf2dda1d7cf69ecc"}, - {file = "SQLAlchemy-1.4.41-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd767cf5d7252b1c88fcfb58426a32d7bd14a7e4942497e15b68ff5d822b41ad"}, - {file = "SQLAlchemy-1.4.41-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5102fb9ee2c258a2218281adcb3e1918b793c51d6c2b4666ce38c35101bb940e"}, - {file = "SQLAlchemy-1.4.41-cp310-cp310-win32.whl", hash = "sha256:2082a2d2fca363a3ce21cfa3d068c5a1ce4bf720cf6497fb3a9fc643a8ee4ddd"}, - {file = "SQLAlchemy-1.4.41-cp310-cp310-win_amd64.whl", hash = "sha256:e4b12e3d88a8fffd0b4ca559f6d4957ed91bd4c0613a4e13846ab8729dc5c251"}, - {file = "SQLAlchemy-1.4.41-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:90484a2b00baedad361402c257895b13faa3f01780f18f4a104a2f5c413e4536"}, - {file = "SQLAlchemy-1.4.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b67fc780cfe2b306180e56daaa411dd3186bf979d50a6a7c2a5b5036575cbdbb"}, - {file = "SQLAlchemy-1.4.41-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ad2b727fc41c7f8757098903f85fafb4bf587ca6605f82d9bf5604bd9c7cded"}, - {file = "SQLAlchemy-1.4.41-cp311-cp311-win32.whl", hash = "sha256:59bdc291165b6119fc6cdbc287c36f7f2859e6051dd923bdf47b4c55fd2f8bd0"}, - {file = "SQLAlchemy-1.4.41-cp311-cp311-win_amd64.whl", hash = "sha256:d2e054aed4645f9b755db85bc69fc4ed2c9020c19c8027976f66576b906a74f1"}, - {file = "SQLAlchemy-1.4.41-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:4ba7e122510bbc07258dc42be6ed45997efdf38129bde3e3f12649be70683546"}, - {file = "SQLAlchemy-1.4.41-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0dcf127bb99458a9d211e6e1f0f3edb96c874dd12f2503d4d8e4f1fd103790b"}, - {file = "SQLAlchemy-1.4.41-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e16c2be5cb19e2c08da7bd3a87fed2a0d4e90065ee553a940c4fc1a0fb1ab72b"}, - {file = "SQLAlchemy-1.4.41-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5ebeeec5c14533221eb30bad716bc1fd32f509196318fb9caa7002c4a364e4c"}, - {file = "SQLAlchemy-1.4.41-cp36-cp36m-win32.whl", hash = "sha256:3e2ef592ac3693c65210f8b53d0edcf9f4405925adcfc031ff495e8d18169682"}, - {file = "SQLAlchemy-1.4.41-cp36-cp36m-win_amd64.whl", hash = "sha256:eb30cf008850c0a26b72bd1b9be6730830165ce049d239cfdccd906f2685f892"}, - {file = "SQLAlchemy-1.4.41-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:c23d64a0b28fc78c96289ffbd0d9d1abd48d267269b27f2d34e430ea73ce4b26"}, - {file = "SQLAlchemy-1.4.41-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eb8897367a21b578b26f5713833836f886817ee2ffba1177d446fa3f77e67c8"}, - {file = "SQLAlchemy-1.4.41-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:14576238a5f89bcf504c5f0a388d0ca78df61fb42cb2af0efe239dc965d4f5c9"}, - {file = "SQLAlchemy-1.4.41-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:639e1ae8d48b3c86ffe59c0daa9a02e2bfe17ca3d2b41611b30a0073937d4497"}, - {file = "SQLAlchemy-1.4.41-cp37-cp37m-win32.whl", hash = 
"sha256:0005bd73026cd239fc1e8ccdf54db58b6193be9a02b3f0c5983808f84862c767"}, - {file = "SQLAlchemy-1.4.41-cp37-cp37m-win_amd64.whl", hash = "sha256:5323252be2bd261e0aa3f33cb3a64c45d76829989fa3ce90652838397d84197d"}, - {file = "SQLAlchemy-1.4.41-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:05f0de3a1dc3810a776275763764bb0015a02ae0f698a794646ebc5fb06fad33"}, - {file = "SQLAlchemy-1.4.41-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0002e829142b2af00b4eaa26c51728f3ea68235f232a2e72a9508a3116bd6ed0"}, - {file = "SQLAlchemy-1.4.41-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22ff16cedab5b16a0db79f1bc99e46a6ddececb60c396562e50aab58ddb2871c"}, - {file = "SQLAlchemy-1.4.41-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccfd238f766a5bb5ee5545a62dd03f316ac67966a6a658efb63eeff8158a4bbf"}, - {file = "SQLAlchemy-1.4.41-cp38-cp38-win32.whl", hash = "sha256:58bb65b3274b0c8a02cea9f91d6f44d0da79abc993b33bdedbfec98c8440175a"}, - {file = "SQLAlchemy-1.4.41-cp38-cp38-win_amd64.whl", hash = "sha256:ce8feaa52c1640de9541eeaaa8b5fb632d9d66249c947bb0d89dd01f87c7c288"}, - {file = "SQLAlchemy-1.4.41-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:199a73c31ac8ea59937cc0bf3dfc04392e81afe2ec8a74f26f489d268867846c"}, - {file = "SQLAlchemy-1.4.41-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676d51c9f6f6226ae8f26dc83ec291c088fe7633269757d333978df78d931ab"}, - {file = "SQLAlchemy-1.4.41-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:036d8472356e1d5f096c5e0e1a7e0f9182140ada3602f8fff6b7329e9e7cfbcd"}, - {file = "SQLAlchemy-1.4.41-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2307495d9e0ea00d0c726be97a5b96615035854972cc538f6e7eaed23a35886c"}, - {file = "SQLAlchemy-1.4.41-cp39-cp39-win32.whl", hash = "sha256:9c56e19780cd1344fcd362fd6265a15f48aa8d365996a37fab1495cae8fcd97d"}, - {file = "SQLAlchemy-1.4.41-cp39-cp39-win_amd64.whl", hash = "sha256:f5fa526d027d804b1f85cdda1eb091f70bde6fb7d87892f6dd5a48925bc88898"}, - {file = "SQLAlchemy-1.4.41.tar.gz", hash = "sha256:0292f70d1797e3c54e862e6f30ae474014648bc9c723e14a2fda730adb0a9791"}, +sqlalchemy = [ + {file = "SQLAlchemy-1.4.42-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:28e881266a172a4d3c5929182fde6bb6fba22ac93f137d5380cc78a11a9dd124"}, + {file = "SQLAlchemy-1.4.42-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ca9389a00f639383c93ed00333ed763812f80b5ae9e772ea32f627043f8c9c88"}, + {file = "SQLAlchemy-1.4.42-cp27-cp27m-win32.whl", hash = "sha256:1d0c23ecf7b3bc81e29459c34a3f4c68ca538de01254e24718a7926810dc39a6"}, + {file = "SQLAlchemy-1.4.42-cp27-cp27m-win_amd64.whl", hash = "sha256:6c9d004eb78c71dd4d3ce625b80c96a827d2e67af9c0d32b1c1e75992a7916cc"}, + {file = "SQLAlchemy-1.4.42-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9e3a65ce9ed250b2f096f7b559fe3ee92e6605fab3099b661f0397a9ac7c8d95"}, + {file = "SQLAlchemy-1.4.42-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:2e56dfed0cc3e57b2f5c35719d64f4682ef26836b81067ee6cfad062290fd9e2"}, + {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b42c59ffd2d625b28cdb2ae4cde8488543d428cba17ff672a543062f7caee525"}, + {file = 
"SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22459fc1718785d8a86171bbe7f01b5c9d7297301ac150f508d06e62a2b4e8d2"}, + {file = "SQLAlchemy-1.4.42-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df76e9c60879fdc785a34a82bf1e8691716ffac32e7790d31a98d7dec6e81545"}, + {file = "SQLAlchemy-1.4.42-cp310-cp310-win32.whl", hash = "sha256:e7e740453f0149437c101ea4fdc7eea2689938c5760d7dcc436c863a12f1f565"}, + {file = "SQLAlchemy-1.4.42-cp310-cp310-win_amd64.whl", hash = "sha256:effc89e606165ca55f04f3f24b86d3e1c605e534bf1a96e4e077ce1b027d0b71"}, + {file = "SQLAlchemy-1.4.42-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:97ff50cd85bb907c2a14afb50157d0d5486a4b4639976b4a3346f34b6d1b5272"}, + {file = "SQLAlchemy-1.4.42-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12c6949bae10f1012ab5c0ea52ab8db99adcb8c7b717938252137cdf694c775"}, + {file = "SQLAlchemy-1.4.42-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11b2ec26c5d2eefbc3e6dca4ec3d3d95028be62320b96d687b6e740424f83b7d"}, + {file = "SQLAlchemy-1.4.42-cp311-cp311-win32.whl", hash = "sha256:6045b3089195bc008aee5c273ec3ba9a93f6a55bc1b288841bd4cfac729b6516"}, + {file = "SQLAlchemy-1.4.42-cp311-cp311-win_amd64.whl", hash = "sha256:0501f74dd2745ec38f44c3a3900fb38b9db1ce21586b691482a19134062bf049"}, + {file = "SQLAlchemy-1.4.42-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:6e39e97102f8e26c6c8550cb368c724028c575ec8bc71afbbf8faaffe2b2092a"}, + {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15d878929c30e41fb3d757a5853b680a561974a0168cd33a750be4ab93181628"}, + {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fa5b7eb2051e857bf83bade0641628efe5a88de189390725d3e6033a1fff4257"}, + {file = "SQLAlchemy-1.4.42-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1c5f8182b4f89628d782a183d44db51b5af84abd6ce17ebb9804355c88a7b5"}, + {file = "SQLAlchemy-1.4.42-cp36-cp36m-win32.whl", hash = "sha256:a7dd5b7b34a8ba8d181402d824b87c5cee8963cb2e23aa03dbfe8b1f1e417cde"}, + {file = "SQLAlchemy-1.4.42-cp36-cp36m-win_amd64.whl", hash = "sha256:5ede1495174e69e273fad68ad45b6d25c135c1ce67723e40f6cf536cb515e20b"}, + {file = "SQLAlchemy-1.4.42-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:9256563506e040daddccaa948d055e006e971771768df3bb01feeb4386c242b0"}, + {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4948b6c5f4e56693bbeff52f574279e4ff972ea3353f45967a14c30fb7ae2beb"}, + {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1811a0b19a08af7750c0b69e38dec3d46e47c4ec1d74b6184d69f12e1c99a5e0"}, + {file = "SQLAlchemy-1.4.42-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b01d9cd2f9096f688c71a3d0f33f3cd0af8549014e66a7a7dee6fc214a7277d"}, + {file = "SQLAlchemy-1.4.42-cp37-cp37m-win32.whl", hash = "sha256:bd448b262544b47a2766c34c0364de830f7fb0772d9959c1c42ad61d91ab6565"}, + {file = "SQLAlchemy-1.4.42-cp37-cp37m-win_amd64.whl", hash = "sha256:04f2598c70ea4a29b12d429a80fad3a5202d56dce19dd4916cc46a965a5ca2e9"}, + {file = 
"SQLAlchemy-1.4.42-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:3ab7c158f98de6cb4f1faab2d12973b330c2878d0c6b689a8ca424c02d66e1b3"}, + {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee377eb5c878f7cefd633ab23c09e99d97c449dd999df639600f49b74725b80"}, + {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:934472bb7d8666727746a75670a1f8d91a9cae8c464bba79da30a0f6faccd9e1"}, + {file = "SQLAlchemy-1.4.42-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb94a3d1ba77ff2ef11912192c066f01e68416f554c194d769391638c8ad09a"}, + {file = "SQLAlchemy-1.4.42-cp38-cp38-win32.whl", hash = "sha256:f0f574465b78f29f533976c06b913e54ab4980b9931b69aa9d306afff13a9471"}, + {file = "SQLAlchemy-1.4.42-cp38-cp38-win_amd64.whl", hash = "sha256:a85723c00a636eed863adb11f1e8aaa36ad1c10089537823b4540948a8429798"}, + {file = "SQLAlchemy-1.4.42-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5ce6929417d5dce5ad1d3f147db81735a4a0573b8fb36e3f95500a06eaddd93e"}, + {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723e3b9374c1ce1b53564c863d1a6b2f1dc4e97b1c178d9b643b191d8b1be738"}, + {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:876eb185911c8b95342b50a8c4435e1c625944b698a5b4a978ad2ffe74502908"}, + {file = "SQLAlchemy-1.4.42-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fd49af453e590884d9cdad3586415922a8e9bb669d874ee1dc55d2bc425aacd"}, + {file = "SQLAlchemy-1.4.42-cp39-cp39-win32.whl", hash = "sha256:e4ef8cb3c5b326f839bfeb6af5f406ba02ad69a78c7aac0fbeeba994ad9bb48a"}, + {file = "SQLAlchemy-1.4.42-cp39-cp39-win_amd64.whl", hash = "sha256:5f966b64c852592469a7eb759615bbd351571340b8b344f1d3fa2478b5a4c934"}, + {file = "SQLAlchemy-1.4.42.tar.gz", hash = "sha256:177e41914c476ed1e1b77fd05966ea88c094053e17a85303c4ce007f88eff363"}, ] sqlalchemy2-stubs = [ {file = "sqlalchemy2-stubs-0.0.2a29.tar.gz", hash = "sha256:1bbc6aebd76db7c0351a9f45cc1c4e8ac335ba150094c2af091e8b87b9118419"}, From 71e80f3829bac6cd22b2518566c8138deefe6ed8 Mon Sep 17 00:00:00 2001 From: "Edgar R. M" Date: Tue, 18 Oct 2022 14:32:14 -0500 Subject: [PATCH 09/12] ci: Add Poetry pre-commit hooks (#746) * ci: Add Poetry pre-commit hooks ci: Add Poetry pre-commit hooks chore(deps-dev): Bump xdoctest from 1.0.0 to 1.0.1 (#805) Bumps [xdoctest](https://github.com/Erotemic/xdoctest) from 1.0.0 to 1.0.1. - [Release notes](https://github.com/Erotemic/xdoctest/releases) - [Changelog](https://github.com/Erotemic/xdoctest/blob/main/CHANGELOG.md) - [Commits](https://github.com/Erotemic/xdoctest/compare/v1.0.0...v1.0.1) --- updated-dependencies: - dependency-name: xdoctest dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Edgar R. M chore(deps-dev): Bump types-requests from 2.28.0 to 2.28.1 (#810) Bumps [types-requests](https://github.com/python/typeshed) from 2.28.0 to 2.28.1. 
- [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-requests dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * pre-commite update * don't run poetry-lock pre-commit hook in CI * Bump poetry pre-commit hook Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8c1ce694a..b65d61236 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,6 +2,7 @@ ci: autofix_prs: false autoupdate_schedule: weekly autoupdate_commit_msg: 'chore: pre-commit autoupdate' + skip: [poetry-lock] repos: - repo: https://github.com/pre-commit/pre-commit-hooks @@ -65,3 +66,10 @@ repos: singer_sdk/helpers/_simpleeval.py| tests/core/test_simpleeval.py )$ + +- repo: https://github.com/python-poetry/poetry + rev: 1.2.2 + hooks: + - id: poetry-check + - id: poetry-lock + args: [--no-update] From f2bd0db06ed5e9146a1337cf7a72c96873dc346f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Oct 2022 14:46:00 -0500 Subject: [PATCH 10/12] chore(deps): Bump pytz from 2022.4 to 2022.5 (#1083) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Edgar R. M --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3beac0522..7c28bad1d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -879,7 +879,7 @@ unidecode = ["Unidecode (>=1.1.1)"] [[package]] name = "pytz" -version = "2022.4" +version = "2022.5" description = "World timezone definitions, modern and historical" category = "main" optional = false @@ -1992,8 +1992,8 @@ python-slugify = [ {file = "python_slugify-6.1.2-py2.py3-none-any.whl", hash = "sha256:7b2c274c308b62f4269a9ba701aa69a797e9bca41aeee5b3a9e79e36b6656927"}, ] pytz = [ - {file = "pytz-2022.4-py2.py3-none-any.whl", hash = "sha256:2c0784747071402c6e99f0bafdb7da0fa22645f06554c7ae06bf6358897e9c91"}, - {file = "pytz-2022.4.tar.gz", hash = "sha256:48ce799d83b6f8aab2020e369b627446696619e79645419610b9facd909b3174"}, + {file = "pytz-2022.5-py2.py3-none-any.whl", hash = "sha256:335ab46900b1465e714b4fda4963d87363264eb662aab5e65da039c25f1f5b22"}, + {file = "pytz-2022.5.tar.gz", hash = "sha256:c4d88f472f54d615e9cd582a5004d1e5f624854a6a27a6211591c251f22a6914"}, ] pytzdata = [ {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, From fab2cc3ea3b37c9d7d7cbc07985e2f1096e139e2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 19 Oct 2022 11:00:39 -0500 Subject: [PATCH 11/12] chore(deps-dev): Bump types-pytz from 2022.4.0.0 to 2022.5.0.0 (#1085) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7c28bad1d..b2dded7c7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1207,7 +1207,7 @@ python-versions = "*" [[package]] name = "types-pytz" -version = "2022.4.0.0" +version = "2022.5.0.0" description = "Typing stubs for 
pytz" category = "dev" optional = false @@ -1331,7 +1331,7 @@ docs = ["sphinx", "sphinx-rtd-theme", "sphinx-copybutton", "myst-parser", "sphin [metadata] lock-version = "1.1" python-versions = "<3.11,>=3.7.1" -content-hash = "bb115a6f5444238ab0eca2a29d67d26c9c360ed1bb92b966e7eefbade7feba21" +content-hash = "7e8ebd0efd632715c51b0db3936d248869e168548bc3f4399695c0a5d56985e1" [metadata.files] alabaster = [ @@ -2263,8 +2263,8 @@ types-python-dateutil = [ {file = "types_python_dateutil-2.8.19.2-py3-none-any.whl", hash = "sha256:3f4dbe465e7e0c6581db11fd7a4855d1355b78712b3f292bd399cd332247e9c0"}, ] types-pytz = [ - {file = "types-pytz-2022.4.0.0.tar.gz", hash = "sha256:17d66e4b16e80ceae0787726f3a22288df7d3f9fdebeb091dc64b92c0e4ea09d"}, - {file = "types_pytz-2022.4.0.0-py3-none-any.whl", hash = "sha256:950b0f3d64ed5b03a3e29c1e38fe2be8371c933c8e97922d0352345336eb8af4"}, + {file = "types-pytz-2022.5.0.0.tar.gz", hash = "sha256:0c163b15d3e598e6cc7074a99ca9ec72b25dc1b446acc133b827667af0b7b09a"}, + {file = "types_pytz-2022.5.0.0-py3-none-any.whl", hash = "sha256:a8e1fe6a1b270fbfaf2553b20ad0f1316707cc320e596da903bb17d7373fed2d"}, ] types-PyYAML = [ {file = "types-PyYAML-6.0.12.tar.gz", hash = "sha256:f6f350418125872f3f0409d96a62a5a5ceb45231af5cc07ee0034ec48a3c82fa"}, diff --git a/pyproject.toml b/pyproject.toml index a9dae0e53..e0ed8942b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,7 +90,7 @@ viztracer = "^0.15.4" requests-mock = "^1.10.0" sqlalchemy2-stubs = {version = "^0.0.2a29", allow-prereleases = true} types-python-dateutil = "^2.8.19" -types-pytz = "^2022.4.0.0" +types-pytz = "^2022.5.0.0" types-requests = "^2.28.11" types-simplejson = "^3.17.7" types-PyYAML = "^6.0.12" From 2721dc5d00ceef91617ca91d372ad6db6f71f830 Mon Sep 17 00:00:00 2001 From: Ken Payne Date: Wed, 19 Oct 2022 23:28:55 +0100 Subject: [PATCH 12/12] fix: create schema and table on `add_sink` (#1036) * start on schema and table creation on * linting * add default schema name * add schema to table metadata * Add missing import for `singer_sdk.helpers._catalog` * undo connection module * fix copy-paste formatting * fix test * more connector changes * fix docstring * add schema creation test * remove create_table_with_records method * Update singer_sdk/sinks/sql.py Co-authored-by: Aaron ("AJ") Steers Co-authored-by: Edgar R. M Co-authored-by: Aaron ("AJ") Steers --- singer_sdk/sinks/core.py | 8 +++ singer_sdk/sinks/sql.py | 118 +++++++++++++++++--------------------- singer_sdk/streams/sql.py | 73 ++++++++++++++++------- singer_sdk/target_base.py | 7 ++- tests/core/test_sqlite.py | 36 ++++++++++++ 5 files changed, 152 insertions(+), 90 deletions(-) diff --git a/singer_sdk/sinks/core.py b/singer_sdk/sinks/core.py index d3f8badad..18487546c 100644 --- a/singer_sdk/sinks/core.py +++ b/singer_sdk/sinks/core.py @@ -423,6 +423,14 @@ def activate_version(self, new_version: int) -> None: "Ignoring." ) + def setup(self) -> None: + """Perform any setup actions at the beginning of a Stream. + + Setup is executed once per Sink instance, after instantiation. If a Schema + change is detected, a new Sink is instantiated and this method is called again. + """ + pass + def clean_up(self) -> None: """Perform any clean up actions required at end of a stream. 
diff --git a/singer_sdk/sinks/sql.py b/singer_sdk/sinks/sql.py index 5f37a0236..c3455d5df 100644 --- a/singer_sdk/sinks/sql.py +++ b/singer_sdk/sinks/sql.py @@ -10,7 +10,7 @@ from singer_sdk.plugin_base import PluginBase from singer_sdk.sinks.batch import BatchSink -from singer_sdk.streams.sql import SQLConnector +from singer_sdk.streams import SQLConnector class SQLSink(BatchSink): @@ -38,11 +38,7 @@ def __init__( connector: Optional connector to reuse. """ self._connector: SQLConnector - if connector: - self._connector = connector - else: - self._connector = self.connector_class(dict(target.config)) - + self._connector = connector or self.connector_class(dict(target.config)) super().__init__(target, stream_name, schema, key_properties) @property @@ -65,103 +61,93 @@ def connection(self) -> sqlalchemy.engine.Connection: @property def table_name(self) -> str: - """Returns the table name, with no schema or database part. + """Return the table name, with no schema or database part. Returns: The target table name. """ parts = self.stream_name.split("-") - - if len(parts) == 1: - return self.stream_name - else: - return parts[-1] + return self.stream_name if len(parts) == 1 else parts[-1] @property def schema_name(self) -> Optional[str]: - """Returns the schema name or `None` if using names with no schema part. + """Return the schema name or `None` if using names with no schema part. Returns: The target schema name. """ - return None # Assumes single-schema target context. + parts = self.stream_name.split("-") + if len(parts) in {2, 3}: + # Stream name is a two-part or three-part identifier. + # Use the second-to-last part as the schema name. + return parts[-2] + + # Schema name not detected. + return None @property def database_name(self) -> Optional[str]: - """Returns the DB name or `None` if using names with no database part. + """Return the DB name or `None` if using names with no database part. Returns: The target database name. """ return None # Assumes single-DB target context. - def process_batch(self, context: dict) -> None: - """Process a batch with the given batch context. - - Writes a batch to the SQL target. Developers may override this method - in order to provide a more efficient upload/upsert process. + @property + def full_table_name(self) -> str: + """Return the fully qualified table name. - Args: - context: Stream partition or context dictionary. + Returns: + The fully qualified table name. """ - # If duplicates are merged, these can be tracked via - # :meth:`~singer_sdk.Sink.tally_duplicate_merged()`. - self.connector.prepare_table( - full_table_name=self.full_table_name, - schema=self.schema, - primary_keys=self.key_properties, - as_temp_table=False, - ) - self.bulk_insert_records( - full_table_name=self.full_table_name, - schema=self.schema, - records=context["records"], + return self.connector.get_fully_qualified_name( + table_name=self.table_name, + schema_name=self.schema_name, + db_name=self.database_name, ) @property - def full_table_name(self) -> str: - """Gives the fully qualified table name. + def full_schema_name(self) -> str: + """Return the fully qualified schema name. Returns: - The fully qualified table name. + The fully qualified schema name. 
""" return self.connector.get_fully_qualified_name( - self.table_name, - self.schema_name, - self.database_name, + schema_name=self.schema_name, db_name=self.database_name ) - def create_table_with_records( - self, - full_table_name: Optional[str], - schema: dict, - records: Iterable[Dict[str, Any]], - primary_keys: Optional[List[str]] = None, - partition_keys: Optional[List[str]] = None, - as_temp_table: bool = False, - ) -> None: - """Create an empty table. + def setup(self) -> None: + """Set up Sink. - Args: - full_table_name: the target table name. - schema: the JSON schema for the new table. - records: records to load. - primary_keys: list of key properties. - partition_keys: list of partition keys. - as_temp_table: True to create a temp table. + This method is called on Sink creation, and creates the required Schema and + Table entities in the target database. """ - full_table_name = full_table_name or self.full_table_name - if primary_keys is None: - primary_keys = self.key_properties - partition_keys = partition_keys or None + if self.schema_name: + self.connector.prepare_schema(self.schema_name) self.connector.prepare_table( - full_table_name=full_table_name, - primary_keys=primary_keys, - schema=schema, - as_temp_table=as_temp_table, + full_table_name=self.full_table_name, + schema=self.schema, + primary_keys=self.key_properties, + as_temp_table=False, ) + + def process_batch(self, context: dict) -> None: + """Process a batch with the given batch context. + + Writes a batch to the SQL target. Developers may override this method + in order to provide a more efficient upload/upsert process. + + Args: + context: Stream partition or context dictionary. + """ + # If duplicates are merged, these can be tracked via + # :meth:`~singer_sdk.Sink.tally_duplicate_merged()`. self.bulk_insert_records( - full_table_name=full_table_name, schema=schema, records=records + full_table_name=self.full_table_name, + schema=self.schema, + records=context["records"], ) def generate_insert_statement( diff --git a/singer_sdk/streams/sql.py b/singer_sdk/streams/sql.py index 6a0392485..eead34c97 100644 --- a/singer_sdk/streams/sql.py +++ b/singer_sdk/streams/sql.py @@ -26,7 +26,6 @@ class SQLConnector: The connector class serves as a wrapper around the SQL connection. The functions of the connector are: - - connecting to the source - generating SQLAlchemy connection and engine objects - discovering schema catalog entries @@ -76,6 +75,7 @@ def create_sqlalchemy_connection(self) -> sqlalchemy.engine.Connection: By default this will create using the sqlalchemy `stream_results=True` option described here: + https://docs.sqlalchemy.org/en/14/core/connections.html#using-server-side-cursors-a-k-a-stream-results Developers may override this method if their provider does not support @@ -191,7 +191,6 @@ def to_sql_type(jsonschema_type: dict) -> sqlalchemy.types.TypeEngine: Developers may override this method to accept additional input argument types, to support non-standard types, or to provide custom typing logic. - If overriding this method, developers should call the default implementation from the base class for all unhandled cases. @@ -205,7 +204,7 @@ def to_sql_type(jsonschema_type: dict) -> sqlalchemy.types.TypeEngine: @staticmethod def get_fully_qualified_name( - table_name: str, + table_name: str | None = None, schema_name: str | None = None, db_name: str | None = None, delimiter: str = ".", @@ -219,23 +218,23 @@ def get_fully_qualified_name( delimiter: Generally: '.' for SQL names and '-' for Singer names. 
Raises: - ValueError: If table_name is not provided or if neither schema_name or - db_name are provided. + ValueError: If all 3 name parts not supplied. Returns: The fully qualified name as a string. """ - if db_name and schema_name: - result = delimiter.join([db_name, schema_name, table_name]) - elif db_name: - result = delimiter.join([db_name, table_name]) - elif schema_name: - result = delimiter.join([schema_name, table_name]) - elif table_name: - result = table_name - else: + parts = [] + + if db_name: + parts.append(db_name) + if schema_name: + parts.append(schema_name) + if table_name: + parts.append(table_name) + + if not parts: raise ValueError( - "Could not generate fully qualified name for stream: " + "Could not generate fully qualified name: " + ":".join( [ db_name or "(unknown-db)", @@ -245,7 +244,7 @@ def get_fully_qualified_name( ) ) - return result + return delimiter.join(parts) @property def _dialect(self) -> sqlalchemy.engine.Dialect: @@ -487,6 +486,18 @@ def table_exists(self, full_table_name: str) -> bool: sqlalchemy.inspect(self._engine).has_table(full_table_name), ) + def schema_exists(self, schema_name: str) -> bool: + """Determine if the target database schema already exists. + + Args: + schema_name: The target database schema name. + + Returns: + True if the database schema exists, False if not. + """ + schema_names = sqlalchemy.inspect(self._engine).get_schema_names() + return schema_name in schema_names + def get_table_columns( self, full_table_name: str, column_names: list[str] | None = None ) -> dict[str, sqlalchemy.Column]: @@ -547,6 +558,14 @@ def column_exists(self, full_table_name: str, column_name: str) -> bool: """ return column_name in self.get_table_columns(full_table_name) + def create_schema(self, schema_name: str) -> None: + """Create target schema. + + Args: + schema_name: The target schema to create. + """ + self._engine.execute(sqlalchemy.schema.CreateSchema(schema_name)) + def create_empty_table( self, full_table_name: str, @@ -573,7 +592,8 @@ def create_empty_table( _ = partition_keys # Not supported in generic implementation. - meta = sqlalchemy.MetaData() + _, schema_name, table_name = self.parse_full_table_name(full_table_name) + meta = sqlalchemy.MetaData(schema=schema_name) columns: list[sqlalchemy.Column] = [] primary_keys = primary_keys or [] try: @@ -592,7 +612,7 @@ def create_empty_table( ) ) - _ = sqlalchemy.Table(full_table_name, meta, *columns) + _ = sqlalchemy.Table(table_name, meta, *columns) meta.create_all(self._engine) def _create_empty_column( @@ -630,6 +650,16 @@ def _create_empty_column( ) ) + def prepare_schema(self, schema_name: str) -> None: + """Create the target database schema. + + Args: + schema_name: The target schema name. + """ + schema_exists = self.schema_exists(schema_name) + if not schema_exists: + self.create_schema(schema_name) + def prepare_table( self, full_table_name: str, @@ -788,6 +818,7 @@ def _sort_types( For example, [Smallint, Integer, Datetime, String, Double] would become [Unicode, String, Double, Integer, Smallint, Datetime]. + String types will be listed first, then decimal types, then integer types, then bool types, and finally datetime and date. Higher precision, scale, and length will be sorted earlier. @@ -823,7 +854,7 @@ def _get_type_sort_key( def _get_column_type( self, full_table_name: str, column_name: str ) -> sqlalchemy.types.TypeEngine: - """Gets the SQL type of the declared column. + """Get the SQL type of the declared column. Args: full_table_name: The name of the table. 
@@ -937,7 +968,7 @@ def _singer_catalog_entry(self) -> CatalogEntry: @property def connector(self) -> SQLConnector: - """The connector object. + """Return a connector object. Returns: The connector object. @@ -946,7 +977,7 @@ def connector(self) -> SQLConnector: @property def metadata(self) -> MetadataMapping: - """The Singer metadata. + """Return the Singer metadata. Metadata from an input catalog will override standard metadata. diff --git a/singer_sdk/target_base.py b/singer_sdk/target_base.py index 0b21b04c5..a1ddd5e78 100644 --- a/singer_sdk/target_base.py +++ b/singer_sdk/target_base.py @@ -224,14 +224,15 @@ def add_sink( """ self.logger.info(f"Initializing '{self.name}' target sink...") sink_class = self.get_sink_class(stream_name=stream_name) - result = sink_class( + sink = sink_class( target=self, stream_name=stream_name, schema=schema, key_properties=key_properties, ) - self._sinks_active[stream_name] = result - return result + sink.setup() + self._sinks_active[stream_name] = sink + return sink def _assert_sink_exists(self, stream_name: str) -> None: """Raise a RecordsWithoutSchemaException exception if stream doesn't exist. diff --git a/tests/core/test_sqlite.py b/tests/core/test_sqlite.py index 5e76d8168..b9c7c82ca 100644 --- a/tests/core/test_sqlite.py +++ b/tests/core/test_sqlite.py @@ -10,6 +10,7 @@ from uuid import uuid4 import pytest +import sqlalchemy from samples.sample_tap_sqlite import SQLiteConnector, SQLiteTap from samples.sample_target_csv.csv_target import SampleTargetCSV @@ -265,6 +266,41 @@ def test_sync_sqlite_to_sqlite( assert line_num > 0, "No lines read." +def test_sqlite_schema_addition( + sqlite_target_test_config: dict, sqlite_sample_target: SQLTarget +): + """Test that SQL-based targets attempt to create new schema if included in stream name.""" + schema_name = f"test_schema_{str(uuid4()).split('-')[-1]}" + table_name = f"zzz_tmp_{str(uuid4()).split('-')[-1]}" + test_stream_name = f"{schema_name}-{table_name}" + schema_message = { + "type": "SCHEMA", + "stream": test_stream_name, + "schema": { + "type": "object", + "properties": {"col_a": th.StringType().to_dict()}, + }, + } + tap_output = "\n".join( + json.dumps(msg) + for msg in [ + schema_message, + { + "type": "RECORD", + "stream": test_stream_name, + "record": {"col_a": "samplerow1"}, + }, + ] + ) + # sqlite doesn't support schema creation + with pytest.raises(sqlalchemy.exc.OperationalError) as excinfo: + target_sync_test( + sqlite_sample_target, input=StringIO(tap_output), finalize=True + ) + # check the target at least tried to create the schema + assert excinfo.value.statement == f"CREATE SCHEMA {schema_name}" + + def test_sqlite_column_addition(sqlite_sample_target: SQLTarget): """End-to-end-to-end test for SQLite tap and target.