From c25ac8a748092a717430a7b7805d1418d98c6775 Mon Sep 17 00:00:00 2001 From: Augustin Date: Fri, 28 Oct 2022 11:45:29 +0200 Subject: [PATCH 01/13] SAT: make `expect_records` mandatory in `high` `test_strictness_level` (#18497) --- .../bases/source-acceptance-test/.coveragerc | 4 +- .../bases/source-acceptance-test/CHANGELOG.md | 3 + .../bases/source-acceptance-test/Dockerfile | 2 +- .../source_acceptance_test/config.py | 11 +- .../source_acceptance_test/conftest.py | 69 ++++++-- .../source_acceptance_test/tests/test_core.py | 30 ++-- .../unit_tests/test_config.py | 15 ++ .../unit_tests/test_core.py | 51 +----- .../unit_tests/test_global_fixtures.py | 167 ++++++++++++++++++ .../acceptance-test-config.yml | 8 +- .../source-acceptance-tests-reference.md | 7 + 11 files changed, 284 insertions(+), 83 deletions(-) create mode 100644 airbyte-integrations/bases/source-acceptance-test/unit_tests/test_global_fixtures.py diff --git a/airbyte-integrations/bases/source-acceptance-test/.coveragerc b/airbyte-integrations/bases/source-acceptance-test/.coveragerc index 11957a0e6499..2cda014ccf5e 100644 --- a/airbyte-integrations/bases/source-acceptance-test/.coveragerc +++ b/airbyte-integrations/bases/source-acceptance-test/.coveragerc @@ -1,8 +1,8 @@ [report] # show lines missing coverage show_missing = true -# coverage 64% measured on 62303a85def89450d2e46573a3d96cd326f2e921 (2022-08-09) +# coverage 74% measured on 4977ac2c527f03c15ce0094cfd48f6104a0fd82f (2022-10-26) # This number should probably never be adjusted down, only up i.e: we should only ever increase our test coverage -fail_under = 64 +fail_under = 74 skip_covered = true skip_empty = true diff --git a/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md b/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md index 8db3b2cf26bf..c8b79bcb78d8 100644 --- a/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md +++ b/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md @@ -1,5 +1,8 @@ 
# Changelog +## 0.2.15 +Make `expect_records` mandatory in `high` `test_strictness_level`. [#18497](https://github.com/airbytehq/airbyte/pull/18497/). + ## 0.2.14 Fail basic read in `high` `test_strictness_level` if no `bypass_reason` is set on empty_streams. [#18425](https://github.com/airbytehq/airbyte/pull/18425/). diff --git a/airbyte-integrations/bases/source-acceptance-test/Dockerfile b/airbyte-integrations/bases/source-acceptance-test/Dockerfile index 92171914db04..96225d466ae4 100644 --- a/airbyte-integrations/bases/source-acceptance-test/Dockerfile +++ b/airbyte-integrations/bases/source-acceptance-test/Dockerfile @@ -33,7 +33,7 @@ COPY pytest.ini setup.py ./ COPY source_acceptance_test ./source_acceptance_test RUN pip install . -LABEL io.airbyte.version=0.2.14 +LABEL io.airbyte.version=0.2.15 LABEL io.airbyte.name=airbyte/source-acceptance-test ENTRYPOINT ["python", "-m", "pytest", "-p", "source_acceptance_test.plugin", "-r", "fEsx"] diff --git a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/config.py b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/config.py index a172c2ebab90..5bd5307e4fe9 100644 --- a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/config.py +++ b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/config.py @@ -73,7 +73,8 @@ class ExpectedRecordsConfig(BaseModel): class Config: extra = "forbid" - path: Path = Field(description="File with expected records") + bypass_reason: Optional[str] = Field(description="Reason why this test is bypassed.") + path: Optional[Path] = Field(description="File with expected records") extra_fields: bool = Field(False, description="Allow records to have other fields") exact_order: bool = Field(False, description="Ensure that records produced in exact same order") extra_records: bool = Field( @@ -92,6 +93,14 @@ def validate_extra_records(cls, extra_records, values): raise ValueError("extra_records must be off 
if extra_fields enabled") return extra_records + @validator("path", always=True) + def no_bypass_reason_when_path_is_set(cls, path, values): + if path and values.get("bypass_reason"): + raise ValueError("You can't set a bypass_reason if a path is set") + if not path and not values.get("bypass_reason"): + raise ValueError("A path or a bypass_reason must be set") + return path + class EmptyStreamConfiguration(BaseConfig): name: str diff --git a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/conftest.py b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/conftest.py index ac0f79760c1d..72a00b883405 100644 --- a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/conftest.py +++ b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/conftest.py @@ -10,7 +10,7 @@ from logging import Logger from pathlib import Path from subprocess import STDOUT, check_output, run -from typing import Any, List, MutableMapping, Optional +from typing import Any, List, MutableMapping, Optional, Set import pytest from airbyte_cdk.models import ( @@ -24,7 +24,8 @@ ) from docker import errors from source_acceptance_test.base import BaseTest -from source_acceptance_test.config import Config +from source_acceptance_test.config import Config, EmptyStreamConfiguration +from source_acceptance_test.tests import TestBasicRead from source_acceptance_test.utils import ConnectorRunner, SecretDict, filter_output, load_config, load_yaml_or_json_path @@ -167,14 +168,62 @@ def pull_docker_image(acceptance_test_config) -> None: pytest.exit(f"Docker image `{image_name}` not found, please check your {config_filename} file", returncode=1) -@pytest.fixture(name="expected_records") -def expected_records_fixture(inputs, base_path) -> List[AirbyteRecordMessage]: - expect_records = getattr(inputs, "expect_records") - if not expect_records: - return [] - - with open(str(base_path / getattr(expect_records, "path"))) as f: - return 
[AirbyteRecordMessage.parse_raw(line) for line in f] +@pytest.fixture(name="empty_streams") +def empty_streams_fixture(inputs, test_strictness_level) -> Set[EmptyStreamConfiguration]: + empty_streams = getattr(inputs, "empty_streams", set()) + if test_strictness_level is Config.TestStrictnessLevel.high and empty_streams: + all_empty_streams_have_bypass_reasons = all([bool(empty_stream.bypass_reason) for empty_stream in inputs.empty_streams]) + if not all_empty_streams_have_bypass_reasons: + pytest.fail("A bypass_reason must be filled in for all empty streams when test_strictness_level is set to high.") + return empty_streams + + +@pytest.fixture(name="expected_records_by_stream") +def expected_records_by_stream_fixture( + test_strictness_level: Config.TestStrictnessLevel, + configured_catalog: ConfiguredAirbyteCatalog, + empty_streams: Set[EmptyStreamConfiguration], + inputs, + base_path, +) -> MutableMapping[str, List[MutableMapping]]: + def enforce_high_strictness_level_rules(expect_records_config, configured_catalog, empty_streams, records_by_stream) -> Optional[str]: + error_prefix = "High strictness level error: " + if expect_records_config is None: + pytest.fail(error_prefix + "expect_records must be configured for the basic_read test.") + elif expect_records_config.path: + not_seeded_streams = find_not_seeded_streams(configured_catalog, empty_streams, records_by_stream) + if not_seeded_streams: + pytest.fail( + error_prefix + + f"{', '.join(not_seeded_streams)} streams are declared in the catalog but do not have expected records. Please add expected records to {expect_records_config.path} or declare these streams in empty_streams." 
+ ) + + expect_records_config = inputs.expect_records + + expected_records_by_stream = {} + if expect_records_config: + if expect_records_config.path: + expected_records_file_path = str(base_path / expect_records_config.path) + with open(expected_records_file_path, "r") as f: + all_records = [AirbyteRecordMessage.parse_raw(line) for line in f] + expected_records_by_stream = TestBasicRead.group_by_stream(all_records) + + if test_strictness_level is Config.TestStrictnessLevel.high: + enforce_high_strictness_level_rules(expect_records_config, configured_catalog, empty_streams, expected_records_by_stream) + return expected_records_by_stream + + +def find_not_seeded_streams( + configured_catalog: ConfiguredAirbyteCatalog, + empty_streams: Set[EmptyStreamConfiguration], + records_by_stream: MutableMapping[str, List[MutableMapping]], +) -> Set[str]: + stream_names_in_catalog = set([configured_stream.stream.name for configured_stream in configured_catalog.streams]) + empty_streams_names = set([stream.name for stream in empty_streams]) + expected_record_stream_names = set(records_by_stream.keys()) + expected_seeded_stream_names = stream_names_in_catalog - empty_streams_names + + return expected_seeded_stream_names - expected_record_stream_names @pytest.fixture(name="cached_schemas", scope="session") diff --git a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py index e8c3cae89639..08b5f5717f4d 100644 --- a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py +++ b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py @@ -27,7 +27,7 @@ from docker.errors import ContainerError from jsonschema._utils import flatten from source_acceptance_test.base import BaseTest -from source_acceptance_test.config import BasicReadTestConfig, Config, ConnectionTestConfig, 
DiscoveryTestConfig, SpecTestConfig +from source_acceptance_test.config import BasicReadTestConfig, ConnectionTestConfig, DiscoveryTestConfig, SpecTestConfig from source_acceptance_test.utils import ConnectorRunner, SecretDict, filter_output, make_hashable, verify_records_schema from source_acceptance_test.utils.backward_compatibility import CatalogDiffChecker, SpecDiffChecker, validate_previous_configs from source_acceptance_test.utils.common import find_all_values_for_key_in_schema, find_keyword_schema @@ -435,14 +435,17 @@ def _validate_field_appears_at_least_once(self, records: List, configured_catalo assert not stream_name_to_empty_fields_mapping, msg def _validate_expected_records( - self, records: List[AirbyteRecordMessage], expected_records: List[AirbyteRecordMessage], flags, detailed_logger: Logger + self, + records: List[AirbyteRecordMessage], + expected_records_by_stream: MutableMapping[str, List[MutableMapping]], + flags, + detailed_logger: Logger, ): """ We expect some records from stream to match expected_records, partially or fully, in exact or any order. 
""" actual_by_stream = self.group_by_stream(records) - expected_by_stream = self.group_by_stream(expected_records) - for stream_name, expected in expected_by_stream.items(): + for stream_name, expected in expected_records_by_stream.items(): actual = actual_by_stream.get(stream_name, []) detailed_logger.info(f"Actual records for stream {stream_name}:") detailed_logger.log_json_list(actual) @@ -464,12 +467,10 @@ def test_read( connector_config, configured_catalog, inputs: BasicReadTestConfig, - expected_records: List[AirbyteRecordMessage], + expected_records_by_stream: MutableMapping[str, List[MutableMapping]], docker_runner: ConnectorRunner, detailed_logger, - test_strictness_level: Config.TestStrictnessLevel, ): - self.enforce_strictness_level(test_strictness_level, inputs) output = docker_runner.call_read(connector_config, configured_catalog) records = [message.record for message in filter_output(output, Type.RECORD)] @@ -489,9 +490,12 @@ def test_read( if inputs.validate_data_points: self._validate_field_appears_at_least_once(records=records, configured_catalog=configured_catalog) - if expected_records: + if expected_records_by_stream: self._validate_expected_records( - records=records, expected_records=expected_records, flags=inputs.expect_records, detailed_logger=detailed_logger + records=records, + expected_records_by_stream=expected_records_by_stream, + flags=inputs.expect_records, + detailed_logger=detailed_logger, ) def test_airbyte_trace_message_on_failure(self, connector_config, inputs: BasicReadTestConfig, docker_runner: ConnectorRunner): @@ -581,11 +585,3 @@ def group_by_stream(records: List[AirbyteRecordMessage]) -> MutableMapping[str, result[record.stream].append(record.data) return result - - @staticmethod - def enforce_strictness_level(test_strictness_level: Config.TestStrictnessLevel, inputs: BasicReadTestConfig): - if test_strictness_level is Config.TestStrictnessLevel.high: - if inputs.empty_streams: - all_empty_streams_have_bypass_reasons = 
all([bool(empty_stream.bypass_reason) for empty_stream in inputs.empty_streams]) - if not all_empty_streams_have_bypass_reasons: - pytest.fail("A bypass_reason must be filled in for all empty streams when test_strictness_level is set to high.") diff --git a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_config.py b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_config.py index 938791090943..907db03ae8eb 100644 --- a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_config.py +++ b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_config.py @@ -197,3 +197,18 @@ def test_config_parsing(self, raw_config, expected_output_config, expected_error def test_legacy_config_migration(self, legacy_config, expected_parsed_config): assert config.Config.is_legacy(legacy_config) assert config.Config.parse_obj(legacy_config) == expected_parsed_config + + +class TestExpectedRecordsConfig: + @pytest.mark.parametrize( + "path, bypass_reason, expectation", + [ + pytest.param("my_path", None, does_not_raise()), + pytest.param(None, "Good bypass reason", does_not_raise()), + pytest.param(None, None, pytest.raises(ValidationError)), + pytest.param("my_path", "Good bypass reason", pytest.raises(ValidationError)), + ], + ) + def test_bypass_reason_behavior(self, path, bypass_reason, expectation): + with expectation: + config.ExpectedRecordsConfig(path=path, bypass_reason=bypass_reason) diff --git a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_core.py b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_core.py index 2b6b43823764..8082a6e02e27 100644 --- a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_core.py +++ b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_core.py @@ -20,8 +20,7 @@ TraceType, Type, ) -from source_acceptance_test.config import BasicReadTestConfig, Config, EmptyStreamConfiguration -from source_acceptance_test.tests import 
test_core +from source_acceptance_test.config import BasicReadTestConfig from source_acceptance_test.tests.test_core import TestBasicRead as _TestBasicRead from source_acceptance_test.tests.test_core import TestDiscovery as _TestDiscovery @@ -260,7 +259,7 @@ def test_additional_properties_is_true(discovered_catalog, expectation): ), ], ) -def test_read(mocker, schema, record, expectation): +def test_read(schema, record, expectation): catalog = ConfiguredAirbyteCatalog( streams=[ ConfiguredAirbyteStream( @@ -276,10 +275,8 @@ def test_read(mocker, schema, record, expectation): AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="test_stream", data=record, emitted_at=111)) ] t = _TestBasicRead() - t.enforce_strictness_level = mocker.Mock() with expectation: - t.test_read(None, catalog, input_config, [], docker_runner_mock, MagicMock(), Config.TestStrictnessLevel.low) - t.enforce_strictness_level.assert_called_with(Config.TestStrictnessLevel.low, input_config) + t.test_read(None, catalog, input_config, [], docker_runner_mock, MagicMock()) @pytest.mark.parametrize( @@ -844,45 +841,3 @@ def test_validate_field_appears_at_least_once(records, configured_catalog, expec t._validate_field_appears_at_least_once(records=records, configured_catalog=configured_catalog) else: t._validate_field_appears_at_least_once(records=records, configured_catalog=configured_catalog) - - -@pytest.mark.parametrize( - "test_strictness_level, basic_read_test_config, expect_test_failure", - [ - pytest.param( - Config.TestStrictnessLevel.low, - BasicReadTestConfig(config_path="config_path", empty_streams={EmptyStreamConfiguration(name="my_empty_stream")}), - False, - id="[LOW test strictness level] Empty streams can be declared without bypass_reason.", - ), - pytest.param( - Config.TestStrictnessLevel.low, - BasicReadTestConfig( - config_path="config_path", empty_streams={EmptyStreamConfiguration(name="my_empty_stream", bypass_reason="good reason")} - ), - False, - id="[LOW test 
strictness level] Empty streams can be declared with a bypass_reason.", - ), - pytest.param( - Config.TestStrictnessLevel.high, - BasicReadTestConfig(config_path="config_path", empty_streams={EmptyStreamConfiguration(name="my_empty_stream")}), - True, - id="[HIGH test strictness level] Empty streams can't be declared without bypass_reason.", - ), - pytest.param( - Config.TestStrictnessLevel.high, - BasicReadTestConfig( - config_path="config_path", empty_streams={EmptyStreamConfiguration(name="my_empty_stream", bypass_reason="good reason")} - ), - False, - id="[HIGH test strictness level] Empty streams can be declared with a bypass_reason.", - ), - ], -) -def test_enforce_strictness_level(mocker, test_strictness_level, basic_read_test_config, expect_test_failure): - mocker.patch.object(test_core, "pytest") - assert _TestBasicRead.enforce_strictness_level(test_strictness_level, basic_read_test_config) is None - if expect_test_failure: - test_core.pytest.fail.assert_called_once() - else: - test_core.pytest.fail.assert_not_called() diff --git a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_global_fixtures.py b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_global_fixtures.py new file mode 100644 index 000000000000..cdd8b006b3c6 --- /dev/null +++ b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_global_fixtures.py @@ -0,0 +1,167 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import json + +import pytest +from airbyte_cdk.models import AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode, SyncMode +from source_acceptance_test import conftest +from source_acceptance_test.config import BasicReadTestConfig, Config, EmptyStreamConfiguration, ExpectedRecordsConfig + + +@pytest.mark.parametrize( + "test_strictness_level, basic_read_test_config, expect_test_failure", + [ + pytest.param( + Config.TestStrictnessLevel.low, + BasicReadTestConfig(config_path="config_path", empty_streams={EmptyStreamConfiguration(name="my_empty_stream")}), + False, + id="[LOW test strictness level] Empty streams can be declared without bypass_reason.", + ), + pytest.param( + Config.TestStrictnessLevel.low, + BasicReadTestConfig( + config_path="config_path", empty_streams={EmptyStreamConfiguration(name="my_empty_stream", bypass_reason="good reason")} + ), + False, + id="[LOW test strictness level] Empty streams can be declared with a bypass_reason.", + ), + pytest.param( + Config.TestStrictnessLevel.high, + BasicReadTestConfig(config_path="config_path", empty_streams={EmptyStreamConfiguration(name="my_empty_stream")}), + True, + id="[HIGH test strictness level] Empty streams can't be declared without bypass_reason.", + ), + pytest.param( + Config.TestStrictnessLevel.high, + BasicReadTestConfig( + config_path="config_path", empty_streams={EmptyStreamConfiguration(name="my_empty_stream", bypass_reason="good reason")} + ), + False, + id="[HIGH test strictness level] Empty streams can be declared with a bypass_reason.", + ), + ], +) +def test_empty_streams_fixture(mocker, test_strictness_level, basic_read_test_config, expect_test_failure): + mocker.patch.object(conftest.pytest, "fail") + # Pytest prevents fixture to be directly called. Using __wrapped__ allows us to call the actual function before it's been wrapped by the decorator. 
+ assert conftest.empty_streams_fixture.__wrapped__(basic_read_test_config, test_strictness_level) == basic_read_test_config.empty_streams + if expect_test_failure: + conftest.pytest.fail.assert_called_once() + else: + conftest.pytest.fail.assert_not_called() + + +TEST_AIRBYTE_STREAM_A = AirbyteStream(name="test_stream_a", json_schema={"k": "v"}, supported_sync_modes=[SyncMode.full_refresh]) +TEST_AIRBYTE_STREAM_B = AirbyteStream(name="test_stream_b", json_schema={"k": "v"}, supported_sync_modes=[SyncMode.full_refresh]) +TEST_AIRBYTE_STREAM_C = AirbyteStream(name="test_stream_c", json_schema={"k": "v"}, supported_sync_modes=[SyncMode.full_refresh]) + +TEST_CONFIGURED_AIRBYTE_STREAM_A = ConfiguredAirbyteStream( + stream=TEST_AIRBYTE_STREAM_A, + sync_mode=SyncMode.full_refresh, + destination_sync_mode=DestinationSyncMode.overwrite, +) + +TEST_CONFIGURED_AIRBYTE_STREAM_B = ConfiguredAirbyteStream( + stream=TEST_AIRBYTE_STREAM_B, + sync_mode=SyncMode.full_refresh, + destination_sync_mode=DestinationSyncMode.overwrite, +) + +TEST_CONFIGURED_AIRBYTE_STREAM_C = ConfiguredAirbyteStream( + stream=TEST_AIRBYTE_STREAM_C, + sync_mode=SyncMode.full_refresh, + destination_sync_mode=DestinationSyncMode.overwrite, +) + +TEST_CONFIGURED_CATALOG = ConfiguredAirbyteCatalog( + streams=[TEST_CONFIGURED_AIRBYTE_STREAM_A, TEST_CONFIGURED_AIRBYTE_STREAM_B, TEST_CONFIGURED_AIRBYTE_STREAM_C] +) + + +@pytest.mark.parametrize( + "test_strictness_level, configured_catalog, empty_streams, expected_records, expected_records_config, should_fail", + [ + pytest.param( + Config.TestStrictnessLevel.high, + TEST_CONFIGURED_CATALOG, + set(), + [], + None, + True, + id="High strictness level: No expected records configuration -> Failing", + ), + pytest.param( + Config.TestStrictnessLevel.high, + TEST_CONFIGURED_CATALOG, + {EmptyStreamConfiguration(name="test_stream_b"), EmptyStreamConfiguration(name="test_stream_c")}, + [{"stream": "test_stream_a", "data": {"k": "foo"}, "emitted_at": 1634387507000}], + 
ExpectedRecordsConfig(path="expected_records.json"), + False, + id="High strictness level: test_stream_b and test_stream_c are declared as empty streams, expected records only contains test_stream_a record -> Not failing", + ), + pytest.param( + Config.TestStrictnessLevel.high, + TEST_CONFIGURED_CATALOG, + set(), + [{"stream": "test_stream_a", "data": {"k": "foo"}, "emitted_at": 1634387507000}], + ExpectedRecordsConfig(path="expected_records.json"), + True, + id="High strictness level: test_stream_b and test_stream_c are not declared as empty streams, expected records only contains test_stream_a record -> Failing", + ), + pytest.param( + Config.TestStrictnessLevel.high, + TEST_CONFIGURED_CATALOG, + {EmptyStreamConfiguration(name="test_stream_b")}, + [{"stream": "test_stream_a", "data": {"k": "foo"}, "emitted_at": 1634387507000}], + ExpectedRecordsConfig(path="expected_records.json"), + True, + id="High strictness level: test_stream_b is declared as an empty stream, test_stream_c is not declared as empty streams, expected records only contains test_stream_a record -> Failing", + ), + pytest.param( + Config.TestStrictnessLevel.high, + TEST_CONFIGURED_CATALOG, + set(), + [], + ExpectedRecordsConfig(bypass_reason="A good reason to not have expected records"), + False, + id="High strictness level: Expected records configuration with bypass_reason -> Not failing", + ), + pytest.param( + Config.TestStrictnessLevel.low, + TEST_CONFIGURED_CATALOG, + set(), + [], + None, + False, + id="Low strictness level, no empty stream, no expected records -> Not failing", + ), + pytest.param( + Config.TestStrictnessLevel.low, + TEST_CONFIGURED_CATALOG, + set(), + [{"stream": "test_stream_a", "data": {"k": "foo"}, "emitted_at": 1634387507000}], + ExpectedRecordsConfig(path="expected_records.json"), + False, + id="Low strictness level, no empty stream, incomplete expected records -> Not failing", + ), + ], +) +def test_expected_records_by_stream_fixture( + tmp_path, mocker, 
test_strictness_level, configured_catalog, empty_streams, expected_records, expected_records_config, should_fail +): + mocker.patch.object(conftest.pytest, "fail") + + base_path = tmp_path + with open(f"{base_path}/expected_records.json", "w") as expected_records_file: + for record in expected_records: + expected_records_file.write(json.dumps(record) + "\n") + + inputs = BasicReadTestConfig(config_path="", empty_streams=empty_streams, expect_records=expected_records_config) + + conftest.expected_records_by_stream_fixture.__wrapped__(test_strictness_level, configured_catalog, empty_streams, inputs, base_path) + if should_fail: + conftest.pytest.fail.assert_called_once() + else: + conftest.pytest.fail.assert_not_called() diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-analytics-v4/acceptance-test-config.yml index 4c3cb0ea4112..4b7cee681080 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-google-analytics-v4/acceptance-test-config.yml @@ -21,10 +21,10 @@ tests: expect_records: path: "integration_tests/expected_records.txt" incremental: - - config_path: "secrets/service_config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - future_state_path: "integration_tests/abnormal_state.json" - threshold_days: 2 + - config_path: "secrets/service_config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" + threshold_days: 2 full_refresh: - config_path: "secrets/service_config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md b/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md index 68c231abf9a3..0aa5db52f7e7 100644 --- 
a/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md +++ b/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md @@ -151,6 +151,7 @@ Set `validate_data_points=True` if possible. This validation is going to be enab | `expect_trace_message_on_failure` | boolean | True | Ensure that a trace message is emitted when the connector crashes | | `expect_records` | object | None | Compare produced records with expected records, see details below | | `expect_records.path` | string | | File with expected records | +| `expect_records.bypass_reason` | string | | Explain why this test is bypassed | | `expect_records.extra_fields` | boolean | False | Allow output records to have other fields i.e: expected records are a subset | | `expect_records.exact_order` | boolean | False | Ensure that records produced in exact same order | | `expect_records.extra_records` | boolean | True | Allow connector to produce extra records, but still enforce all records from the expected file to be produced | @@ -302,3 +303,9 @@ acceptance_tests: timeout_seconds: 1200 ... ``` + +#### Basic read: `expect_records` must be set +In `high` test strictness level we expect the `expect_records` subtest to be set. +If you can't create an `expected_records.json` with all the existing stream you need to declare the missing streams in the `empty_streams` section. +If you can't get an `expected_records.json` file at all, you must fill in a `bypass_reason`. 
+ From d9acf47efa5a7516fb4478196720f9d19f8a42a8 Mon Sep 17 00:00:00 2001 From: Ivica Taseski Date: Fri, 28 Oct 2022 14:16:03 +0200 Subject: [PATCH 02/13] =?UTF-8?q?=F0=9F=8E=89=20New=20destination:=20Yugab?= =?UTF-8?q?ytedb=20(#18039)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add yugabytedb destination connector * add dest def + add changelog * auto-bump connector version Co-authored-by: marcosmarxm Co-authored-by: Marcos Marx Co-authored-by: Octavia Squidington III --- .../src/main/resources/icons/yugabytedb.svg | 409 ++++++++++++++++++ .../seed/destination_definitions.yaml | 7 + .../resources/seed/destination_specs.yaml | 69 +++ .../io/airbyte/db/factory/DatabaseDriver.java | 3 +- .../destination-yugabytedb/.dockerignore | 3 + .../destination-yugabytedb/Dockerfile | 18 + .../destination-yugabytedb/README.md | 68 +++ .../destination-yugabytedb/bootstrap.md | 0 .../destination-yugabytedb/build.gradle | 32 ++ .../destination-yugabytedb/docker-compose.yml | 31 ++ .../yugabytedb/YugabytedbDestination.java | 66 +++ .../YugabytedbNamingTransformer.java | 16 + .../yugabytedb/YugabytedbSqlOperations.java | 58 +++ .../src/main/resources/spec.json | 65 +++ .../yugabytedb/YugabyteDataSource.java | 28 ++ .../YugabytedbContainerInitializr.java | 75 ++++ .../YugabytedbDestinationAcceptanceTest.java | 154 +++++++ .../yugabytedb/YugabytedbDestinationTest.java | 50 +++ .../YugabytedbNamingTransformerTest.java | 27 ++ docs/integrations/README.md | 1 + docs/integrations/destinations/yugabytedb.md | 59 +++ 21 files changed, 1238 insertions(+), 1 deletion(-) create mode 100644 airbyte-config/init/src/main/resources/icons/yugabytedb.svg create mode 100644 airbyte-integrations/connectors/destination-yugabytedb/.dockerignore create mode 100644 airbyte-integrations/connectors/destination-yugabytedb/Dockerfile create mode 100644 airbyte-integrations/connectors/destination-yugabytedb/README.md create mode 100644 
airbyte-integrations/connectors/destination-yugabytedb/bootstrap.md create mode 100644 airbyte-integrations/connectors/destination-yugabytedb/build.gradle create mode 100644 airbyte-integrations/connectors/destination-yugabytedb/docker-compose.yml create mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestination.java create mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformer.java create mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbSqlOperations.java create mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/main/resources/spec.json create mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabyteDataSource.java create mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbContainerInitializr.java create mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationAcceptanceTest.java create mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationTest.java create mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformerTest.java create mode 100644 docs/integrations/destinations/yugabytedb.md diff --git a/airbyte-config/init/src/main/resources/icons/yugabytedb.svg b/airbyte-config/init/src/main/resources/icons/yugabytedb.svg new file mode 100644 index 000000000000..0a493b99787b --- /dev/null +++ 
b/airbyte-config/init/src/main/resources/icons/yugabytedb.svg @@ -0,0 +1,409 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index 02191af24ebc..7a682eded8ee 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -329,3 +329,10 @@ documentationUrl: https://docs.airbyte.com/integrations/destinations/tidb icon: tidb.svg releaseStage: alpha +- name: YugabyteDB + destinationDefinitionId: 2300fdcf-a532-419f-9f24-a014336e7966 + dockerRepository: airbyte/destination-yugabytedb + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/destinations/yugabytedb + icon: yugabytedb.svg + releaseStage: alpha diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index bc7aac2abb09..8c50468aa44c 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -6145,3 +6145,72 @@ supported_destination_sync_modes: - "overwrite" - "append" +- dockerImage: "airbyte/destination-yugabytedb:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/destinations/yugabytedb" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Yugabytedb destination spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + additionalProperties: true + properties: + host: + title: "Host" + description: "The Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "The Port of the database." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 3306 + examples: + - "3306" + order: 1 + database: + title: "Database" + description: "Name of the database." + type: "string" + order: 2 + username: + title: "Username" + description: "The Username which is used to access the database." + type: "string" + order: 3 + schema: + title: "Default Schema" + description: "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + password: + title: "Password" + description: "The Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + jdbc_url_params: + description: "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 5 + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "overwrite" + - "append" diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/DatabaseDriver.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/DatabaseDriver.java index 0bd19d2e196e..d8d4fcd1e91a 100644 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/DatabaseDriver.java +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/DatabaseDriver.java @@ -18,7 +18,8 @@ public enum DatabaseDriver { ORACLE("oracle.jdbc.OracleDriver", "jdbc:oracle:thin:@%s:%d/%s"), POSTGRESQL("org.postgresql.Driver", "jdbc:postgresql://%s:%d/%s"), REDSHIFT("com.amazon.redshift.jdbc.Driver", "jdbc:redshift://%s:%d/%s"), - SNOWFLAKE("net.snowflake.client.jdbc.SnowflakeDriver", "jdbc:snowflake://%s/"); + SNOWFLAKE("net.snowflake.client.jdbc.SnowflakeDriver", "jdbc:snowflake://%s/"), + YUGABYTEDB("com.yugabyte.Driver", 
"jdbc:yugabytedb://%s:%d/%s"); private final String driverClassName; private final String urlFormatString; diff --git a/airbyte-integrations/connectors/destination-yugabytedb/.dockerignore b/airbyte-integrations/connectors/destination-yugabytedb/.dockerignore new file mode 100644 index 000000000000..65c7d0ad3e73 --- /dev/null +++ b/airbyte-integrations/connectors/destination-yugabytedb/.dockerignore @@ -0,0 +1,3 @@ +* +!Dockerfile +!build diff --git a/airbyte-integrations/connectors/destination-yugabytedb/Dockerfile b/airbyte-integrations/connectors/destination-yugabytedb/Dockerfile new file mode 100644 index 000000000000..fe75d31d32ba --- /dev/null +++ b/airbyte-integrations/connectors/destination-yugabytedb/Dockerfile @@ -0,0 +1,18 @@ +FROM airbyte/integration-base-java:dev AS build + +WORKDIR /airbyte +ENV APPLICATION destination-yugabytedb + +COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar + +RUN tar xf ${APPLICATION}.tar --strip-components=1 && rm -rf ${APPLICATION}.tar + +FROM airbyte/integration-base-java:dev + +WORKDIR /airbyte +ENV APPLICATION destination-yugabytedb + +COPY --from=build /airbyte /airbyte + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/destination-yugabytedb diff --git a/airbyte-integrations/connectors/destination-yugabytedb/README.md b/airbyte-integrations/connectors/destination-yugabytedb/README.md new file mode 100644 index 000000000000..7339896b055c --- /dev/null +++ b/airbyte-integrations/connectors/destination-yugabytedb/README.md @@ -0,0 +1,68 @@ +# Destination Yugabytedb + +This is the repository for the Yugabytedb destination connector in Java. +For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/yugabytedb). 
+ +## Local development + +#### Building via Gradle +From the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:destination-yugabytedb:build +``` + +#### Create credentials +**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. +Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. + +**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials. + +### Locally running the connector docker image + +#### Build +Build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:destination-yugabytedb:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-yugabytedb:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-yugabytedb:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-yugabytedb:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-yugabytedb:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +## Testing +We use `JUnit` for Java tests. + +### Unit and Integration Tests +Place unit tests under `src/test/io/airbyte/integrations/destinations/yugabytedb`. + +#### Acceptance Tests +Airbyte has a standard test suite that all destination connectors must pass. 
Implement the `TODO`s in +`src/test-integration/java/io/airbyte/integrations/destinations/yugabytedbDestinationAcceptanceTest.java`. + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:destination-yugabytedb:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:destination-yugabytedb:integrationTest +``` + +## Dependency Management + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
diff --git a/airbyte-integrations/connectors/destination-yugabytedb/bootstrap.md b/airbyte-integrations/connectors/destination-yugabytedb/bootstrap.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/airbyte-integrations/connectors/destination-yugabytedb/build.gradle b/airbyte-integrations/connectors/destination-yugabytedb/build.gradle new file mode 100644 index 000000000000..51ae7a928363 --- /dev/null +++ b/airbyte-integrations/connectors/destination-yugabytedb/build.gradle @@ -0,0 +1,32 @@ +plugins { + id 'application' + id 'airbyte-docker' + id 'airbyte-integration-test-java' +} + +application { + mainClass = 'io.airbyte.integrations.destination.yugabytedb.YugabytedbDestination' +} + +dependencies { + implementation project(':airbyte-config:config-models') + implementation project(':airbyte-protocol:protocol-models') + implementation project(':airbyte-integrations:bases:base-java') + implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) + implementation project(':airbyte-integrations:connectors:destination-jdbc') + implementation project(':airbyte-db:db-lib') + + implementation 'com.yugabyte:jdbc-yugabytedb:42.3.5-yb-1' + + testImplementation project(':airbyte-integrations:bases:standard-destination-test') + + testImplementation "org.assertj:assertj-core:3.21.0" + testImplementation "org.junit.jupiter:junit-jupiter:5.8.1" + testImplementation "org.testcontainers:junit-jupiter:1.17.5" + testImplementation "org.testcontainers:jdbc:1.17.5" + + + + integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') + integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-yugabytedb') +} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/docker-compose.yml b/airbyte-integrations/connectors/destination-yugabytedb/docker-compose.yml new file mode 100644 index 000000000000..cbd967d1f4af --- /dev/null +++ 
b/airbyte-integrations/connectors/destination-yugabytedb/docker-compose.yml @@ -0,0 +1,31 @@ +version: '3' + + +# Note: add mount points at /mnt/master and /mnt/tserver for persistence + +services: + yb-master: + image: yugabytedb/yugabyte:latest + container_name: yb-master-n1 + command: [ "/home/yugabyte/bin/yb-master", + "--fs_data_dirs=/mnt/master", + "--master_addresses=yb-master-n1:7100", + "--rpc_bind_addresses=yb-master-n1:7100", + "--replication_factor=1"] + ports: + - "7000:7000" + + yb-tserver: + image: yugabytedb/yugabyte:latest + container_name: yb-tserver-n1 + command: [ "/home/yugabyte/bin/yb-tserver", + "--fs_data_dirs=/mnt/tserver", + "--start_pgsql_proxy", + "--rpc_bind_addresses=yb-tserver-n1:9100", + "--tserver_master_addrs=yb-master-n1:7100"] + ports: + - "9042:9042" + - "5433:5433" + - "9000:9000" + depends_on: + - yb-master diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestination.java b/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestination.java new file mode 100644 index 000000000000..c7d6fca26337 --- /dev/null +++ b/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestination.java @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.yugabytedb; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import io.airbyte.db.factory.DatabaseDriver; +import io.airbyte.db.jdbc.JdbcUtils; +import io.airbyte.integrations.base.IntegrationRunner; +import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination; +import java.util.Collections; +import java.util.Map; +import java.util.Optional; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class YugabytedbDestination extends AbstractJdbcDestination { + + private static final Logger LOGGER = LoggerFactory.getLogger(YugabytedbDestination.class); + + public static final String DRIVER_CLASS = DatabaseDriver.YUGABYTEDB.getDriverClassName(); + + public YugabytedbDestination() { + super(DRIVER_CLASS, new YugabytedbNamingTransformer(), new YugabytedbSqlOperations()); + } + + public static void main(String[] args) throws Exception { + LOGGER.info("starting destination: {}", YugabytedbDestination.class); + new IntegrationRunner(new YugabytedbDestination()).run(args); + LOGGER.info("completed destination: {}", YugabytedbDestination.class); + } + + @Override + protected Map getDefaultConnectionProperties(JsonNode config) { + return Collections.emptyMap(); + } + + @Override + public JsonNode toJdbcConfig(JsonNode config) { + String schema = + Optional.ofNullable(config.get(JdbcUtils.SCHEMA_KEY)).map(JsonNode::asText).orElse("public"); + + String jdbcUrl = "jdbc:yugabytedb://" + config.get(JdbcUtils.HOST_KEY).asText() + ":" + + config.get(JdbcUtils.PORT_KEY).asText() + "/" + + config.get(JdbcUtils.DATABASE_KEY).asText(); + + ImmutableMap.Builder configBuilder = ImmutableMap.builder() + .put(JdbcUtils.USERNAME_KEY, config.get(JdbcUtils.USERNAME_KEY).asText()) + .put(JdbcUtils.JDBC_URL_KEY, jdbcUrl) + .put(JdbcUtils.SCHEMA_KEY, schema); + + if (config.has(JdbcUtils.PASSWORD_KEY)) { + 
configBuilder.put(JdbcUtils.PASSWORD_KEY, config.get(JdbcUtils.PASSWORD_KEY).asText()); + } + + if (config.has(JdbcUtils.JDBC_URL_PARAMS_KEY)) { + configBuilder.put(JdbcUtils.JDBC_URL_PARAMS_KEY, config.get(JdbcUtils.JDBC_URL_PARAMS_KEY).asText()); + } + + return Jsons.jsonNode(configBuilder.build()); + } + +} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformer.java b/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformer.java new file mode 100644 index 000000000000..60b8ab1b44fb --- /dev/null +++ b/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformer.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.yugabytedb; + +import io.airbyte.integrations.destination.ExtendedNameTransformer; + +public class YugabytedbNamingTransformer extends ExtendedNameTransformer { + + @Override + public String applyDefaultCase(final String input) { + return input.toLowerCase(); + } + +} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbSqlOperations.java b/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbSqlOperations.java new file mode 100644 index 000000000000..cff16136ac2c --- /dev/null +++ b/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbSqlOperations.java @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.yugabytedb; + +import com.yugabyte.copy.CopyManager; +import com.yugabyte.core.BaseConnection; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.integrations.destination.jdbc.JdbcSqlOperations; +import io.airbyte.protocol.models.AirbyteRecordMessage; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.List; + +public class YugabytedbSqlOperations extends JdbcSqlOperations { + + @Override + protected void insertRecordsInternal(JdbcDatabase database, + List records, + String schemaName, + String tableName) + throws Exception { + + if (records.isEmpty()) { + return; + } + + File tempFile = null; + try { + tempFile = Files.createTempFile(tableName + "-", ".tmp").toFile(); + writeBatchToFile(tempFile, records); + + File finalTempFile = tempFile; + database.execute(connection -> { + + var copyManager = new CopyManager(connection.unwrap(BaseConnection.class)); + var sql = String.format("COPY %s.%s FROM STDIN DELIMITER ',' CSV", schemaName, tableName); + + try (var bufferedReader = new BufferedReader(new FileReader(finalTempFile, StandardCharsets.UTF_8))) { + copyManager.copyIn(sql, bufferedReader); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }); + } finally { + if (tempFile != null) { + Files.delete(tempFile.toPath()); + } + } + } + +} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-yugabytedb/src/main/resources/spec.json new file mode 100644 index 000000000000..fe77cdd07639 --- /dev/null +++ b/airbyte-integrations/connectors/destination-yugabytedb/src/main/resources/spec.json @@ -0,0 +1,65 @@ +{ + "documentationUrl": "https://docs.airbyte.io/integrations/destinations/yugabytedb", + "supportsIncremental": 
true, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": ["overwrite", "append"], + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Yugabytedb destination spec", + "type": "object", + "required": ["host", "port", "username", "database", "schema"], + "additionalProperties": true, + "properties": { + "host": { + "title": "Host", + "description": "The Hostname of the database.", + "type": "string", + "order": 0 + }, + "port": { + "title": "Port", + "description": "The Port of the database.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 3306, + "examples": ["3306"], + "order": 1 + }, + "database": { + "title": "Database", + "description": "Name of the database.", + "type": "string", + "order": 2 + }, + "username": { + "title": "Username", + "description": "The Username which is used to access the database.", + "type": "string", + "order": 3 + }, + "schema": { + "title": "Default Schema", + "description": "The default schema tables are written to if the source does not specify a namespace. The usual value for this field is \"public\".", + "type": "string", + "examples": ["public"], + "default": "public", + "order": 3 + }, + "password": { + "title": "Password", + "description": "The Password associated with the username.", + "type": "string", + "airbyte_secret": true, + "order": 4 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).", + "title": "JDBC URL Params", + "type": "string", + "order": 5 + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabyteDataSource.java b/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabyteDataSource.java new file mode 100644 index 000000000000..08f5c81f1ca2 --- /dev/null +++ b/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabyteDataSource.java @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.yugabytedb; + +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.DatabaseDriver; +import java.util.Collections; +import javax.sql.DataSource; + +public class YugabyteDataSource { + + private YugabyteDataSource() { + + } + + static DataSource getInstance(String host, int port, String database, String username, String password) { + String jdbcUrl = "jdbc:yugabytedb://" + host + ":" + port + "/" + database; + return DataSourceFactory.create( + username, + password, + DatabaseDriver.YUGABYTEDB.getDriverClassName(), + jdbcUrl, + Collections.emptyMap()); + } + +} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbContainerInitializr.java b/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbContainerInitializr.java new file mode 100644 index 000000000000..0f1022d43320 --- /dev/null +++ b/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbContainerInitializr.java @@ -0,0 +1,75 @@ +/* + * 
Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.yugabytedb; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.containers.JdbcDatabaseContainer; +import org.testcontainers.utility.DockerImageName; + +public class YugabytedbContainerInitializr { + + private static final Logger LOGGER = LoggerFactory.getLogger(YugabytedbContainerInitializr.class); + + private static YugabytedbContainer yugabytedbContainer; + + private YugabytedbContainerInitializr() { + + } + + public static YugabytedbContainer initContainer() { + if (yugabytedbContainer == null) { + yugabytedbContainer = new YugabytedbContainer(); + } + yugabytedbContainer.start(); + return yugabytedbContainer; + } + + static class YugabytedbContainer extends JdbcDatabaseContainer { + + private static final int YUGABYTE_PORT = 5433; + + public YugabytedbContainer() { + super(DockerImageName.parse("yugabytedb/yugabyte:2.15.2.0-b87")); + + this.setCommand("bin/yugabyted", "start", "--daemon=false"); + this.addExposedPort(YUGABYTE_PORT); + + } + + @Override + public String getDriverClassName() { + return "com.yugabyte.Driver"; + } + + @Override + public String getJdbcUrl() { + String params = constructUrlParameters("?", "&"); + return "jdbc:yugabytedb://" + getHost() + ":" + getMappedPort(YUGABYTE_PORT) + "/yugabyte" + params; + } + + @Override + public String getDatabaseName() { + return "yugabyte"; + } + + @Override + public String getUsername() { + return "yugabyte"; + } + + @Override + public String getPassword() { + return "yugabyte"; + } + + @Override + protected String getTestQueryString() { + return "SELECT 1"; + } + + } + +} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationAcceptanceTest.java 
b/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationAcceptanceTest.java new file mode 100644 index 000000000000..0c4d70581041 --- /dev/null +++ b/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationAcceptanceTest.java @@ -0,0 +1,154 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.yugabytedb; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import io.airbyte.db.jdbc.DefaultJdbcDatabase; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.integrations.base.JavaBaseConstants; +import io.airbyte.integrations.destination.ExtendedNameTransformer; +import io.airbyte.integrations.standardtest.destination.JdbcDestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; +import java.sql.SQLException; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.TestInstance; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +public class YugabytedbDestinationAcceptanceTest extends JdbcDestinationAcceptanceTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(YugabytedbDestinationAcceptanceTest.class); + + private YugabytedbContainerInitializr.YugabytedbContainer yugabytedbContainer; + + private final ExtendedNameTransformer namingResolver = new ExtendedNameTransformer(); + + private JsonNode jsonConfig; + + private JdbcDatabase database; + + private static final Set cleanupTables = new HashSet<>(); + + @BeforeAll 
+ void initContainer() { + yugabytedbContainer = YugabytedbContainerInitializr.initContainer(); + } + + @Override + protected String getImageName() { + return "airbyte/destination-yugabytedb:dev"; + } + + @Override + protected void setup(TestDestinationEnv testEnv) throws Exception { + jsonConfig = Jsons.jsonNode(ImmutableMap.builder() + .put("host", yugabytedbContainer.getHost()) + .put("port", yugabytedbContainer.getMappedPort(5433)) + .put("database", yugabytedbContainer.getDatabaseName()) + .put("username", yugabytedbContainer.getUsername()) + .put("password", yugabytedbContainer.getPassword()) + .put("schema", "public") + .build()); + + database = new DefaultJdbcDatabase(YugabyteDataSource.getInstance( + yugabytedbContainer.getHost(), + yugabytedbContainer.getMappedPort(5433), + yugabytedbContainer.getDatabaseName(), + yugabytedbContainer.getUsername(), + yugabytedbContainer.getPassword())); + + } + + @Override + protected void tearDown(TestDestinationEnv testEnv) throws Exception { + database.execute(connection -> { + var statement = connection.createStatement(); + cleanupTables.forEach(tb -> { + try { + statement.execute("DROP TABLE " + tb + ";"); + } catch (SQLException e) { + throw new RuntimeException(e); + } + }); + }); + cleanupTables.clear(); + } + + @Override + protected JsonNode getConfig() { + return jsonConfig; + } + + @Override + protected JsonNode getFailCheckConfig() { + return Jsons.jsonNode(ImmutableMap.builder() + .put("host", yugabytedbContainer.getHost()) + .put("port", yugabytedbContainer.getMappedPort(5433)) + .put("database", yugabytedbContainer.getDatabaseName()) + .put("username", "usr") + .put("password", "pw") + .put("schema", "public") + .build()); + } + + @Override + protected boolean implementsNamespaces() { + return true; + } + + @Override + protected TestDataComparator getTestDataComparator() { + return new AdvancedTestDataComparator(); + } + + @Override + protected boolean supportBasicDataTypeTest() { + return true; + } + + 
@Override + protected boolean supportArrayDataTypeTest() { + return true; + } + + @Override + protected boolean supportObjectDataTypeTest() { + return true; + } + + @Override + protected List retrieveRecords(TestDestinationEnv testEnv, + String streamName, + String namespace, + JsonNode streamSchema) + throws SQLException { + + String tableName = namingResolver.getRawTableName(streamName); + String schemaName = namingResolver.getNamespace(namespace); + cleanupTables.add(schemaName + "." + tableName); + return retrieveRecordsFromTable(tableName, schemaName); + } + + private List retrieveRecordsFromTable(final String tableName, final String schemaName) + throws SQLException { + + return database.bufferedResultSetQuery( + connection -> { + var statement = connection.createStatement(); + return statement.executeQuery( + String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName, + JavaBaseConstants.COLUMN_NAME_EMITTED_AT)); + }, + rs -> Jsons.deserialize(rs.getString(JavaBaseConstants.COLUMN_NAME_DATA))); + } + +} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationTest.java b/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationTest.java new file mode 100644 index 000000000000..2578ca428285 --- /dev/null +++ b/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationTest.java @@ -0,0 +1,50 @@ +package io.airbyte.integrations.destination.yugabytedb; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import java.util.Collections; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class YugabytedbDestinationTest { + + private YugabytedbDestination yugabytedbDestination; 
+ + @BeforeEach + void setup() { + yugabytedbDestination = new YugabytedbDestination(); + } + + @Test + void testToJdbcConfig() { + + var config = Jsons.jsonNode(ImmutableMap.builder() + .put("host", "localhost") + .put("port", 5433) + .put("database", "yugabyte") + .put("username", "yugabyte") + .put("password", "yugabyte") + .put("schema", "public") + .build()); + + var jdbcConfig = yugabytedbDestination.toJdbcConfig(config); + + assertThat(jdbcConfig.get("schema").asText()).isEqualTo("public"); + assertThat(jdbcConfig.get("username").asText()).isEqualTo("yugabyte"); + assertThat(jdbcConfig.get("password").asText()).isEqualTo("yugabyte"); + assertThat(jdbcConfig.get("jdbc_url").asText()).isEqualTo("jdbc:yugabytedb://localhost:5433/yugabyte"); + + } + + @Test + void testGetDefaultConnectionProperties() { + + var map = yugabytedbDestination.getDefaultConnectionProperties(Jsons.jsonNode(Collections.emptyMap())); + + assertThat(map).isEmpty(); + + } + +} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformerTest.java b/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformerTest.java new file mode 100644 index 000000000000..5565bc9d2ef9 --- /dev/null +++ b/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformerTest.java @@ -0,0 +1,27 @@ +package io.airbyte.integrations.destination.yugabytedb; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class YugabytedbNamingTransformerTest { + + private YugabytedbNamingTransformer yugabytedbNamingTransformer; + + @BeforeEach + void setup() { + yugabytedbNamingTransformer = new YugabytedbNamingTransformer(); + } + + @Test + void testApplyDefaultCase() { + + var 
defaultCase = yugabytedbNamingTransformer.applyDefaultCase("DEFAULT_CASE"); + + assertThat(defaultCase).isEqualTo("default_case"); + + } + + +} diff --git a/docs/integrations/README.md b/docs/integrations/README.md index b46b57ab0c42..1a00d3b3bb18 100644 --- a/docs/integrations/README.md +++ b/docs/integrations/README.md @@ -241,3 +241,4 @@ For more information about the grading system, see [Product Release Stages](http | [SQLite](destinations/sqlite.md) | Alpha | No | | [Streamr](destinations/streamr.md) | Alpha | No | | [TiDB](destinations/tidb.md) | Alpha | No | +| [Yugabytedb](destinations/yugabytedb.md) | Alpha | No | \ No newline at end of file diff --git a/docs/integrations/destinations/yugabytedb.md b/docs/integrations/destinations/yugabytedb.md new file mode 100644 index 000000000000..1e49f17659ec --- /dev/null +++ b/docs/integrations/destinations/yugabytedb.md @@ -0,0 +1,59 @@ +# Yugabytedb + +TODO: update this doc + +## Sync overview + +### Output schema + +Is the output schema fixed (e.g: for an API like Stripe)? If so, point to the connector's schema (e.g: link to Stripe’s documentation) or describe the schema here directly (e.g: include a diagram or paragraphs describing the schema). + +Describe how the connector's schema is mapped to Airbyte concepts. An example description might be: "MagicDB tables become Airbyte Streams and MagicDB columns become Airbyte Fields. In addition, an extracted\_at column is appended to each row being read." + +### Data type mapping + +This section should contain a table mapping each of the connector's data types to Airbyte types. At the moment, Airbyte uses the same types used by [JSONSchema](https://json-schema.org/understanding-json-schema/reference/index.html). `string`, `date-time`, `object`, `array`, `boolean`, `integer`, and `number` are the most commonly used data types. 
+ +| Integration Type | Airbyte Type | Notes | +| :--- | :--- | :--- | + + +### Features + +This section should contain a table with the following format: + +| Feature | Supported?(Yes/No) | Notes | +| :--- | :--- | :--- | +| Full Refresh Sync | | | +| Incremental Sync | | | +| Replicate Incremental Deletes | | | +| For databases, WAL/Logical replication | | | +| SSL connection | | | +| SSH Tunnel Support | | | +| (Any other source-specific features) | | | + +### Performance considerations + +Could this connector hurt the user's database/API/etc... or put too much strain on it in certain circumstances? For example, if there are a lot of tables or rows in a table? What is the breaking point (e.g: 100mm> records)? What can the user do to prevent this? (e.g: use a read-only replica, or schedule frequent syncs, etc..) + +## Getting started + +### Requirements + +* What versions of this connector does this implementation support? (e.g: `postgres v3.14 and above`) +* What configurations, if any, are required on the connector? (e.g: `buffer_size > 1024`) +* Network accessibility requirements +* Credentials/authentication requirements? (e.g: A DB user with read permissions on certain tables) + +### Setup guide + +For each of the above high-level requirements as appropriate, add or point to a follow-along guide. See existing source or destination guides for an example. + +For each major cloud provider we support, also add a follow-along guide for setting up Airbyte to connect to that destination. See the Postgres destination guide for an example of what this should look like. 
+ + +## CHANGELOG + +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:--------------------------------------------------------------|:------------------------| +| 0.1.0 | 2022-10-28 | [#18039](https://github.com/airbytehq/airbyte/pull/18039) | New Destination YugabyteDB | From 726102c7e10ff32bed4338893eac94bfdeb25286 Mon Sep 17 00:00:00 2001 From: Kyryl Skobylko Date: Fri, 28 Oct 2022 15:34:01 +0300 Subject: [PATCH 03/13] fix worker volumeMounts (#18604) --- charts/airbyte-worker/templates/deployment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/charts/airbyte-worker/templates/deployment.yaml b/charts/airbyte-worker/templates/deployment.yaml index 981a466a943f..a42a5cb5d0f0 100644 --- a/charts/airbyte-worker/templates/deployment.yaml +++ b/charts/airbyte-worker/templates/deployment.yaml @@ -395,8 +395,8 @@ spec: {{- if .Values.containerSecurityContext }} securityContext: {{- toYaml .Values.containerSecurityContext | nindent 10 }} {{- end }} - {{- if eq .Values.global.deploymentMode "oss" }} volumeMounts: + {{- if eq .Values.global.deploymentMode "oss" }} - name: gcs-log-creds-volume mountPath: /secrets/gcs-log-creds readOnly: true From 0e2361d0e6f0f82530e691db1c8e4cc1bfc298f5 Mon Sep 17 00:00:00 2001 From: sarafonseca Date: Fri, 28 Oct 2022 10:13:45 -0300 Subject: [PATCH 04/13] =?UTF-8?q?=F0=9F=8E=89=20=20New=20Source:=20RD=20St?= =?UTF-8?q?ation=20Marketing=20[python=20cdk]=20(#18348)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add new source: RD Station Marketing * Update catalog with all streams * Update changelog with the rigth PR reference number * Minor fixes to RD Station docs * Change field replication_start_date to start_date * Fix unit tests after changing replication_start_date name * Fix typo * Refact next_page_token logic * Fix unit tests * Remove print function * Update airbyte-cdk version to 0.2 * Update 
airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/streams.py Co-authored-by: Marcos Marx * Change from yield to yield from * Apply format * add rd station to source def * update doc changelog * auto-bump connector version Co-authored-by: Marcos Marx Co-authored-by: marcosmarxm Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 7 + .../src/main/resources/seed/source_specs.yaml | 64 ++++++ airbyte-integrations/builds.md | 1 + .../source-rd-station-marketing/.dockerignore | 6 + .../source-rd-station-marketing/Dockerfile | 38 ++++ .../source-rd-station-marketing/README.md | 133 ++++++++++++ .../acceptance-test-config.yml | 22 ++ .../acceptance-test-docker.sh | 16 ++ .../source-rd-station-marketing/bootstrap.md | 29 +++ .../source-rd-station-marketing/build.gradle | 9 + .../integration_tests/__init__.py | 3 + .../integration_tests/abnormal_state.json | 14 ++ .../integration_tests/acceptance.py | 14 ++ .../integration_tests/configured_catalog.json | 49 +++++ .../integration_tests/invalid_config.json | 11 + .../integration_tests/sample_config.json | 11 + .../integration_tests/sample_state.json | 14 ++ .../source-rd-station-marketing/main.py | 13 ++ .../requirements.txt | 2 + .../source-rd-station-marketing/setup.py | 31 +++ .../source_rd_station_marketing/__init__.py | 8 + .../schemas/analytics_conversions.json | 30 +++ .../schemas/analytics_emails.json | 51 +++++ .../schemas/analytics_funnel.json | 24 +++ .../analytics_workflow_emails_statistics.json | 63 ++++++ .../schemas/emails.json | 56 +++++ .../schemas/embeddables.json | 24 +++ .../schemas/fields.json | 64 ++++++ .../schemas/landing_pages.json | 30 +++ .../schemas/popups.json | 27 +++ .../schemas/segmentations.json | 44 ++++ .../schemas/workflows.json | 24 +++ .../source_rd_station_marketing/source.py | 74 +++++++ .../source_rd_station_marketing/spec.json | 85 ++++++++ .../source_rd_station_marketing/streams.py | 201 ++++++++++++++++++ 
.../unit_tests/__init__.py | 3 + .../unit_tests/test_incremental_streams.py | 60 ++++++ .../unit_tests/test_source.py | 71 +++++++ .../unit_tests/test_streams.py | 112 ++++++++++ docs/integrations/README.md | 1 + .../sources/rd-station-marketing.md | 44 ++++ 41 files changed, 1583 insertions(+) create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/.dockerignore create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/Dockerfile create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/README.md create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/acceptance-test-config.yml create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/acceptance-test-docker.sh create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/bootstrap.md create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/build.gradle create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/abnormal_state.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/acceptance.py create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/configured_catalog.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/invalid_config.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_config.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_state.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/main.py create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/requirements.txt create mode 100644 
airbyte-integrations/connectors/source-rd-station-marketing/setup.py create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/__init__.py create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_conversions.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_emails.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_funnel.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_workflow_emails_statistics.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/emails.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/embeddables.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/fields.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/landing_pages.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/popups.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/segmentations.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/workflows.json create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/source.py create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/spec.json create mode 100755 
airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/streams.py create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/test_incremental_streams.py create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/test_source.py create mode 100644 airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/test_streams.py create mode 100644 docs/integrations/sources/rd-station-marketing.md diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 693c626ff4de..d46a43b8d187 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -1011,6 +1011,13 @@ icon: retently.svg sourceType: api releaseStage: alpha +- name: RD Station Marketing + sourceDefinitionId: fb141f29-be2a-450b-a4f2-2cd203a00f84 + dockerRepository: airbyte/source-rd-station-marketing + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/rd-station-marketing + sourceType: api + releaseStage: alpha - name: RKI Covid sourceDefinitionId: d78e5de0-aa44-4744-aa4f-74c818ccfe19 dockerRepository: airbyte/source-rki-covid diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 6d77f9af2a46..4224bc7f2d54 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -10180,6 +10180,70 @@ path_in_connector_config: - "credentials" - "client_secret" +- dockerImage: "airbyte/source-rd-station-marketing:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/rd-station-marketing" + 
connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "RD Station Marketing Spec" + type: "object" + required: + - "start_date" + additionalProperties: true + properties: + authorization: + type: "object" + title: "Authentication Type" + description: "Choose one of the possible authorization method" + oneOf: + - title: "Sign in via RD Station (OAuth)" + type: "object" + required: + - "auth_type" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your RD Station developer application." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your RD Station developer application" + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The token for obtaining the new access token." + airbyte_secret: true + start_date: + title: "Start Date" + description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. 
When specified and not\ + \ None, then stream will behave as incremental" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] + authSpecification: + auth_type: "oauth2.0" + oauth2Specification: + rootObject: + - "authorization" + - "0" + oauthFlowInitParameters: + - - "client_id" + - - "client_secret" + oauthFlowOutputParameters: + - - "refresh_token" - dockerImage: "airbyte/source-rki-covid:0.1.1" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/rki-covid" diff --git a/airbyte-integrations/builds.md b/airbyte-integrations/builds.md index 3f58255ffc52..1c8051245533 100644 --- a/airbyte-integrations/builds.md +++ b/airbyte-integrations/builds.md @@ -97,6 +97,7 @@ | Confluence | [![source-confluence](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-confluence%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-confluence) | | Qualaroo | [![source-qualaroo](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-qualaroo%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-qualaroo) | | QuickBooks | [![source-quickbooks-singer](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-quickbooks-singer%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-quickbooks-singer) | +| RD Station Marketing | [![source-rd-station-marketing](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-rd-station-marketing%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-rd-station-marketing) | | Recharge | 
[![source-recharge](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-recharge%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-recharge) | | Recurly | [![source-recurly](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-recurly%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-recurly) | | Redshift | [![source-redshift](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-redshift%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-redshift) | diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/.dockerignore b/airbyte-integrations/connectors/source-rd-station-marketing/.dockerignore new file mode 100644 index 000000000000..1ee8b479485f --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_rd_station_marketing +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/Dockerfile b/airbyte-integrations/connectors/source-rd-station-marketing/Dockerfile new file mode 100644 index 000000000000..327eabb2010a --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . 
+ +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_rd_station_marketing ./source_rd_station_marketing + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-rd-station-marketing diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/README.md b/airbyte-integrations/connectors/source-rd-station-marketing/README.md new file mode 100644 index 000000000000..8337e8a1f952 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/README.md @@ -0,0 +1,133 @@ +# RD Station Marketing Source + +This is the repository for the RD Station Marketing source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/rd-station-marketing). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python3 -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. 
To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-rd-station-marketing:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/rd-station-marketing) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_rd_station_marketing/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source rd-station test creds` +and place them into `secrets/config.json`. 
+ +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-rd-station-marketing:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-rd-station-marketing:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-rd-station-marketing:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-rd-station-marketing:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-rd-station-marketing:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-rd-station-marketing:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). 
+ +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-rd-station-marketing:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-rd-station-marketing:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. 
+1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/acceptance-test-config.yml b/airbyte-integrations/connectors/source-rd-station-marketing/acceptance-test-config.yml new file mode 100644 index 000000000000..7315fa50eda0 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/acceptance-test-config.yml @@ -0,0 +1,22 @@ +# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-rd-station-marketing:dev +tests: + spec: + - spec_path: "source_rd_station_marketing/spec.json" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + timeout_seconds: 3600 + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + timeout_seconds: 3600 diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-rd-station-marketing/acceptance-test-docker.sh new file mode 100644 index 000000000000..c51577d10690 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/bootstrap.md b/airbyte-integrations/connectors/source-rd-station-marketing/bootstrap.md new file mode 100644 index 000000000000..fa71c6f5ca39 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/bootstrap.md @@ -0,0 +1,29 @@ +# RD Station Marketing + +## Overview + +RD Station Marketing is the leading Marketing Automation tool in Latin America. It is a software application that helps your company carry out better campaigns, nurture Leads, generate qualified business opportunities and achieve more results. From social media to email, Landing Pages, Pop-ups, even Automations and Analytics. + +## Authentication + +RD Station Marketing uses Oauth2 to authenticate. To get the credentials, you need first to create an App for private use in this [link](https://appstore.rdstation.com/en/publisher) (needs to be loged in to access). After that, follow [these](https://developers.rdstation.com/reference/autenticacao?lng=en) instructions to create the client_id and client_secret. + +## Endpoints + +There are eleven endpoints in RD Station Marketing Connector: + +- [Analytics Conversions](https://developers.rdstation.com/reference/get_platform-analytics-conversions?lng=en): Responds with conversion statistics for campaings and other marketing assets. +- [Analytics Emails](https://developers.rdstation.com/reference/get_platform-analytics-emails?lng=en): Responds with statistics about the emails sent with this tool. 
+- [Analytics Funnel](https://developers.rdstation.com/reference/get_platform-analytics-funnel): Responds with the sales funnel for a given period, grouped by day. +- [Analytics Workflow Emails Statistics](https://developers.rdstation.com/reference/get_platform-analytics-workflow-emails): Responds with statistics about emails sent via an automation flow. +- [Emails](https://developers.rdstation.com/reference/get_platform-emails): List all sent emails. +- [Embeddables](https://developers.rdstation.com/reference/get_platform-embeddables): Returns a list of all forms for an account. +- [Fields](https://developers.rdstation.com/reference/get_platform-contacts-fields): Returns all fields, customized and default, and its attributes. +- [Landing Pages](https://developers.rdstation.com/reference/get_platform-landing-pages): Returns a list of all landing pages for an account. +- [Pop-ups](https://developers.rdstation.com/reference/get_platform-popups): Returns a list of all pop-ups for an account. +- [Segmentations](https://developers.rdstation.com/reference/get_platform-segmentations): List all segmentations, custom and default. +- [Workflows](https://developers.rdstation.com/reference/get_platform-workflows): Returns all automation flows. + +## Quick Notes + +- The analytics streams are only supported if you have a Pro or Enterprise RD Station Account. The usage is available only to these plans. 
diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/build.gradle b/airbyte-integrations/connectors/source-rd-station-marketing/build.gradle new file mode 100644 index 000000000000..171dd1799b82 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_rd_station_marketing' +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/__init__.py b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/__init__.py new file mode 100644 index 000000000000..1100c1c58cf5 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..0f178109f511 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/abnormal_state.json @@ -0,0 +1,14 @@ +{ + "analytics_emails": { + "send_at": "2217-06-26 21:20:07" + }, + "analytics_funnel": { + "reference_day": "2217-06-26 21:20:07" + }, + "analytics_conversions": { + "asset_updated_at": "2217-06-26 21:20:07" + }, + "analytics_workflow_emails_statistics": { + "asset_updated_at": "2217-06-26 21:20:07" + } +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/acceptance.py new file mode 100644 index 000000000000..950b53b59d41 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..c76a23d355d2 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/configured_catalog.json @@ -0,0 +1,49 @@ +{ + "streams": [ + { + "stream": { + "name": "emails", + "json_schema": {}, + "supported_sync_modes": [ + "full_refresh" + ], + "source_defined_cursor": true, + "default_cursor_field": [ + "update_time" + ] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "landing_pages", + "json_schema": {}, + "supported_sync_modes": [ + "full_refresh" + ], + "source_defined_cursor": true, + "default_cursor_field": [ + "update_time" + ] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "segmentations", + "json_schema": {}, + "supported_sync_modes": [ + "full_refresh" + ], + "source_defined_cursor": true, + "default_cursor_field": [ + "update_time" + ] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + } + ] +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/invalid_config.json new file mode 100644 index 000000000000..178618cff3b3 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/invalid_config.json @@ -0,0 +1,11 @@ +{ + "authorization": + { + "auth_type": "Client", + "client_id": "fake-client-id", + "client_secret": "fake-client-secret", + "refresh_token": "fake-refresh-token" + }, + "replication_start_date": "2022-01-01T00:00:00Z", + "all_contacts_segmentation": "9999999999999" +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_config.json new file mode 100644 index 000000000000..5149ad122f2a --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_config.json @@ -0,0 +1,11 @@ +{ + "authorization": + { + "auth_type": "Client", + "client_id": "", + "client_secret": "", + "refresh_token": "" + }, + "replication_start_date": "2022-01-01T00:00:00Z", + "all_contacts_segmentation": "2050455" +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_state.json new file mode 100644 index 000000000000..e72298059ea4 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_state.json @@ -0,0 +1,14 @@ +{ + "analytics_emails": { + "send_at": "2022-06-26 21:20:07" + }, + "analytics_funnel": { + "reference_day": "2022-06-26 21:20:07" + }, + "analytics_conversions": { + "asset_updated_at": "2022-06-26 21:20:07" + }, + "analytics_workflow_emails_statistics": { + "updated_at": "2022-06-26 21:20:07" + } +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/main.py b/airbyte-integrations/connectors/source-rd-station-marketing/main.py new file mode 100644 index 000000000000..3e89331aff62 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-rd-station-marketing/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_rd_station_marketing import SourceRDStationMarketing + +if __name__ == "__main__": + source = SourceRDStationMarketing() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/requirements.txt b/airbyte-integrations/connectors/source-rd-station-marketing/requirements.txt new file mode 100644 index 000000000000..0411042aa091 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/setup.py b/airbyte-integrations/connectors/source-rd-station-marketing/setup.py new file mode 100644 index 000000000000..b3a9ab68c439 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/setup.py @@ -0,0 +1,31 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.2", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", + "responses~=0.13.3", + "requests-mock", +] + +setup( + name="source_rd_station_marketing", + description="Source implementation for RD Station Marketing.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/__init__.py b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/__init__.py new file mode 100644 index 000000000000..c548f26f7235 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceRDStationMarketing + +__all__ = ["SourceRDStationMarketing"] diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_conversions.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_conversions.json new file mode 100644 index 000000000000..b285d069ca0d --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_conversions.json @@ -0,0 +1,30 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "asset_id": { + "type": ["null", "integer"] + }, + "asset_identifier": { + "type": ["null", "string"] + }, + "asset_created_at": { + "type": ["null", "string"] + }, + "asset_updated_at": { + "type": ["null", "string"] + }, + "asset_type": { + "type": ["null", "string"] + }, + "conversion_count": { + "type": ["null", "integer"] + }, + "visits_count": { + "type": ["null", "integer"] + }, + "conversion_rate": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_emails.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_emails.json new file mode 100644 index 000000000000..6b74fb2db79b --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_emails.json @@ -0,0 +1,51 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "send_at": { + "type": ["null", "string"] + }, + "campaign_id": { + "type": ["null", "integer"] + }, + "campaign_name": { + "type": ["null", "string"] + }, + "email_dropped_count": { + "type": ["null", "integer"] + }, + "email_delivered_count": { + "type": ["null", "integer"] + }, + "email_bounced_count": { + "type": 
["null", "integer"] + }, + "email_opened_count": { + "type": ["null", "integer"] + }, + "email_clicked_count": { + "type": ["null", "integer"] + }, + "email_unsubscribed_count": { + "type": ["null", "integer"] + }, + "email_spam_reported_count": { + "type": ["null", "integer"] + }, + "email_delivered_rate": { + "type": ["null", "number"] + }, + "email_opened_rate": { + "type": ["null", "number"] + }, + "email_clicked_rate": { + "type": ["null", "number"] + }, + "email_spam_reported_rate": { + "type": ["null", "number"] + }, + "contacts_count": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_funnel.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_funnel.json new file mode 100644 index 000000000000..8f144ccfa7de --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_funnel.json @@ -0,0 +1,24 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "reference_day": { + "type": ["null", "string"] + }, + "contacts_count": { + "type": ["null", "integer"] + }, + "qualified_contacts_count": { + "type": ["null", "integer"] + }, + "opportunities_count": { + "type": ["null", "integer"] + }, + "sales_count": { + "type": ["null", "integer"] + }, + "visitors_count": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_workflow_emails_statistics.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_workflow_emails_statistics.json new file mode 100644 index 000000000000..ffe7866f912d --- /dev/null +++ 
b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_workflow_emails_statistics.json @@ -0,0 +1,63 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "workflow_name": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "email_name": { + "type": ["null", "string"] + }, + "workflow_action_id": { + "type": ["null", "string"] + }, + "workflow_id": { + "type": ["null", "string"] + }, + "contacts_count": { + "type": ["null", "integer"] + }, + "count_processed": { + "type": ["null", "integer"] + }, + "email_delivered_count": { + "type": ["null", "integer"] + }, + "email_opened_unique_count": { + "type": ["null", "integer"] + }, + "email_clicked_unique_count": { + "type": ["null", "integer"] + }, + "email_dropped_count": { + "type": ["null", "integer"] + }, + "email_unsubscribed_count": { + "type": ["null", "integer"] + }, + "email_spam_reported_count": { + "type": ["null", "integer"] + }, + "email_delivered_rate": { + "type": ["null", "number"] + }, + "email_opened_rate": { + "type": ["null", "number"] + }, + "email_clicked_rate": { + "type": ["null", "number"] + }, + "email_spam_reported_rate": { + "type": ["null", "number"] + }, + "email_bounced_unique_count": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/emails.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/emails.json new file mode 100644 index 000000000000..d69b22ee0d19 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/emails.json @@ -0,0 +1,56 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "campaign_id": { + "type": ["null", "integer"] + }, + 
"behavior_score_info": { + "type": ["null", "object"], + "properties": { + "engaged": { + "type": ["null", "boolean"] + }, + "disengaged": { + "type": ["null", "boolean"] + }, + "indeterminate": { + "type": ["null", "boolean"] + } + } + }, + "send_at": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "sending_is_imminent": { + "type": ["null", "boolean"] + }, + "is_predictive_sending": { + "type": ["null", "boolean"] + }, + "id": { + "type": ["null", "integer"] + }, + "name": { + "type": ["null", "string"] + }, + "component_template_id": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, + "leads_count": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/embeddables.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/embeddables.json new file mode 100644 index 000000000000..052bb931c79c --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/embeddables.json @@ -0,0 +1,24 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "title": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "conversion_identifier": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/fields.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/fields.json new file mode 100644 index 000000000000..d740af4b0d0c --- /dev/null +++ 
b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/fields.json @@ -0,0 +1,64 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "uuid": { + "type": ["null", "string"] + }, + "label": { + "type": ["null", "object"], + "properties": { + "en-UD": { + "type": ["null", "string"] + }, + "en-US": { + "type": ["null", "string"] + }, + "es-ES": { + "type": ["null", "string"] + }, + "pt-BR": { + "type": ["null", "string"] + }, + "default": { + "type": ["null", "string"] + } + } + }, + "name": { + "type": ["null", "object"], + "properties": { + "en-UD": { + "type": ["null", "string"] + }, + "en-US": { + "type": ["null", "string"] + }, + "es-ES": { + "type": ["null", "string"] + }, + "pt-BR": { + "type": ["null", "string"] + }, + "default": { + "type": ["null", "string"] + } + } + }, + "api_identifier": { + "type": ["null", "string"] + }, + "custom_field": { + "type": ["null", "boolean"] + }, + "validation_rules": { + "type": ["null", "object"] + }, + "presentation_type": { + "type": ["null", "string"] + }, + "data_type": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/landing_pages.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/landing_pages.json new file mode 100644 index 000000000000..dc8e55175ab1 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/landing_pages.json @@ -0,0 +1,30 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "title": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "conversion_identifier": { + "type": ["null", "string"] + }, + "status": { + "type": 
["null", "string"] + }, + "has_active_experiment": { + "type": ["null", "boolean"] + }, + "had_experiment": { + "type": ["null", "boolean"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/popups.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/popups.json new file mode 100644 index 000000000000..250310cc7748 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/popups.json @@ -0,0 +1,27 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "title": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "conversion_identifier": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "trigger": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/segmentations.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/segmentations.json new file mode 100644 index 000000000000..04a2d97658fb --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/segmentations.json @@ -0,0 +1,44 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "name": { + "type": ["null", "string"] + }, + "standard": { + "type": ["null", "boolean"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "process_status": { + "type": ["null", "string"] + }, + "links": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + 
"rel": { + "type": ["null", "string"] + }, + "href": { + "type": ["null", "string"] + }, + "media": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/workflows.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/workflows.json new file mode 100644 index 000000000000..c9bd95a32671 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/workflows.json @@ -0,0 +1,24 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "user_email_created": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "user_email_updated": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/source.py b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/source.py new file mode 100644 index 000000000000..ee56f5e7d7d6 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/source.py @@ -0,0 +1,74 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from typing import Any, List, Mapping, Tuple + +import pendulum +from airbyte_cdk.logger import AirbyteLogger +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http.auth import Oauth2Authenticator +from source_rd_station_marketing.streams import ( + AnalyticsConversions, + AnalyticsEmails, + AnalyticsFunnel, + AnalyticsWorkflowEmailsStatistics, + Emails, + Embeddables, + Fields, + LandingPages, + Popups, + Segmentations, + Workflows, +) + + +class SourceRDStationMarketing(AbstractSource): + def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, Any]: + try: + stream_kwargs = self.get_stream_kwargs(config) + segmentations = Segmentations(**stream_kwargs) + segmentations_gen = segmentations.read_records(sync_mode=SyncMode.full_refresh) + next(segmentations_gen) + return True, None + except Exception as error: + return ( + False, + f"Unable to connect to RD Station Marketing API with the provided credentials - {repr(error)}", + ) + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + """ + :param config: A Mapping of the user input configuration as defined in the connector spec. 
+ """ + stream_kwargs = self.get_stream_kwargs(config) + incremental_kwargs = {**stream_kwargs, "start_date": pendulum.parse(config["start_date"])} + streams = [ + AnalyticsEmails(**incremental_kwargs), + AnalyticsConversions(**incremental_kwargs), + AnalyticsFunnel(**incremental_kwargs), + AnalyticsWorkflowEmailsStatistics(**incremental_kwargs), + Emails(**stream_kwargs), + Embeddables(**stream_kwargs), + Fields(**stream_kwargs), + LandingPages(**stream_kwargs), + Popups(**stream_kwargs), + Segmentations(**stream_kwargs), + Workflows(**stream_kwargs), + ] + return streams + + @staticmethod + def get_stream_kwargs(config: Mapping[str, Any]) -> Mapping[str, Any]: + authorization = config.get("authorization", {}) + stream_kwargs = dict() + + stream_kwargs["authenticator"] = Oauth2Authenticator( + token_refresh_endpoint="https://api.rd.services/auth/token", + client_secret=authorization.get("client_secret"), + client_id=authorization.get("client_id"), + refresh_token=authorization.get("refresh_token"), + ) + return stream_kwargs diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/spec.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/spec.json new file mode 100644 index 000000000000..72eeecef2168 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/spec.json @@ -0,0 +1,85 @@ +{ + "documentationUrl": "https://docs.airbyte.io/integrations/sources/rd-station-marketing", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "RD Station Marketing Spec", + "type": "object", + "required": [ + "start_date" + ], + "additionalProperties": true, + "properties": { + "authorization": { + "type": "object", + "title": "Authentication Type", + "description": "Choose one of the possible authorization method", + "oneOf": [ + { + "title": "Sign in via RD Station (OAuth)", + "type": "object", + 
"required": [ + "auth_type" + ], + "properties": { + "auth_type": { + "type": "string", + "const": "Client", + "order": 0 + }, + "client_id": { + "title": "Client ID", + "type": "string", + "description": "The Client ID of your RD Station developer application.", + "airbyte_secret": true + }, + "client_secret": { + "title": "Client Secret", + "type": "string", + "description": "The Client Secret of your RD Station developer application", + "airbyte_secret": true + }, + "refresh_token": { + "title": "Refresh Token", + "type": "string", + "description": "The token for obtaining the new access token.", + "airbyte_secret": true + } + } + } + ] + }, + "start_date": { + "title": "Start Date", + "description": "UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. When specified and not None, then stream will behave as incremental", + "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", + "examples": [ + "2017-01-25T00:00:00Z" + ], + "type": "string" + } + } + }, + "supportsIncremental": true, + "authSpecification": { + "auth_type": "oauth2.0", + "oauth2Specification": { + "rootObject": [ + "authorization", + 0 + ], + "oauthFlowInitParameters": [ + [ + "client_id" + ], + [ + "client_secret" + ] + ], + "oauthFlowOutputParameters": [ + [ + "refresh_token" + ] + ] + } + } +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/streams.py b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/streams.py new file mode 100755 index 000000000000..25bca0799280 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/streams.py @@ -0,0 +1,201 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from abc import ABC +from datetime import date +from typing import Any, Iterable, Mapping, MutableMapping, Optional + +import pendulum +import requests +from airbyte_cdk.sources.streams.http import HttpStream + + +class RDStationMarketingStream(HttpStream, ABC): + data_field = None + extra_params = {} + page = 1 + page_size_limit = 125 + primary_key = None + url_base = "https://api.rd.services" + + def __init__(self, authenticator, start_date=None, **kwargs): + super().__init__(authenticator=authenticator, **kwargs) + self._start_date = start_date + + def path(self, **kwargs) -> str: + class_name = self.__class__.__name__ + return f"/platform/{class_name[0].lower()}{class_name[1:]}" + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + if self.data_field: + json_response = response.json().get(self.data_field) + else: + json_response = response.json() + if json_response: + self.page = self.page + 1 + return {"next_page": self.page} + else: + return None + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + params = {"page_size": self.page_size_limit, "page": self.page} + if next_page_token: + params = {"page_size": self.page_size_limit, "page": next_page_token["next_page"]} + return params + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + if self.data_field: + records = response.json().get(self.data_field) + else: + records = response.json() + yield from records + + +class IncrementalRDStationMarketingStream(RDStationMarketingStream): + def path(self, **kwargs) -> str: + return f"/platform/analytics/{self.data_field}" + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + return None + + def request_params(self, stream_state: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: + start_date = self._start_date + + 
if start_date and stream_state.get(self.cursor_field): + start_date = max(pendulum.parse(stream_state[self.cursor_field]), start_date) + + params = {} + params.update( + { + "start_date": start_date.strftime("%Y-%m-%d"), + "end_date": date.today().strftime("%Y-%m-%d"), + } + ) + + params.update(self.extra_params) + return params + + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: + latest_benchmark = latest_record[self.cursor_field] + if current_stream_state.get(self.cursor_field): + return {self.cursor_field: max(latest_benchmark, current_stream_state[self.cursor_field])} + return {self.cursor_field: latest_benchmark} + + +class AnalyticsConversions(IncrementalRDStationMarketingStream): + """ + API docs: https://developers.rdstation.com/reference/get_platform-analytics-conversions + """ + + data_field = "conversions" + cursor_field = "asset_updated_at" + primary_key = "asset_id" + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + records = response.json().get(self.data_field)[0].get(self.data_field) + yield from records + + +class AnalyticsEmails(IncrementalRDStationMarketingStream): + """ + API docs: https://developers.rdstation.com/reference/get_platform-analytics-emails + """ + + data_field = "emails" + cursor_field = "send_at" + primary_key = "campaign_id" + + +class AnalyticsFunnel(IncrementalRDStationMarketingStream): + """ + API docs: https://developers.rdstation.com/reference/get_platform-analytics-funnel + """ + + data_field = "funnel" + cursor_field = "reference_day" + primary_key = "reference_day" + + +class AnalyticsWorkflowEmailsStatistics(IncrementalRDStationMarketingStream): + """ + API docs: https://developers.rdstation.com/reference/get_platform-analytics-workflow-emails + """ + + data_field = "workflow_email_statistics" + cursor_field = "updated_at" + primary_key = "workflow_id" + + def path(self, **kwargs) -> str: + return 
"/platform/analytics/workflow_emails_statistics" + + +class Emails(RDStationMarketingStream): + """ + API docs: https://developers.rdstation.com/reference/get_platform-emails + """ + + data_field = "items" + primary_key = "id" + + +class Embeddables(RDStationMarketingStream): + """ + API docs: https://developers.rdstation.com/reference/get_platform-embeddables + """ + + primary_key = "id" + + +class Fields(RDStationMarketingStream): + """ + API docs: https://developers.rdstation.com/reference/get_platform-contacts-fields + """ + + data_field = "fields" + primary_key = "uuid" + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + return None + + def path(self, **kwargs) -> str: + return "/platform/contacts/fields" + + +class LandingPages(RDStationMarketingStream): + """ + API docs: https://developers.rdstation.com/reference/get_platform-landing-pages + """ + + primary_key = "id" + + def path(self, **kwargs) -> str: + return "/platform/landing_pages" + + +class Popups(RDStationMarketingStream): + """ + API docs: https://developers.rdstation.com/reference/get_platform-popups + """ + + primary_key = "id" + + +class Segmentations(RDStationMarketingStream): + """ + API docs: https://developers.rdstation.com/reference/get_platform-segmentations + """ + + data_field = "segmentations" + primary_key = "id" + + +class Workflows(RDStationMarketingStream): + """ + API docs: https://developers.rdstation.com/reference/get_platform-workflows + """ + + data_field = "workflows" + primary_key = "id" diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/__init__.py b/airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/__init__.py new file mode 100644 index 000000000000..1100c1c58cf5 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/test_incremental_streams.py new file mode 100644 index 000000000000..6c99b32f1cc0 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/test_incremental_streams.py @@ -0,0 +1,60 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import MagicMock + +from airbyte_cdk.models import SyncMode +from pytest import fixture +from source_rd_station_marketing.streams import IncrementalRDStationMarketingStream + + +@fixture +def test_current_stream_state(): + return {"updated_time": "2021-10-22"} + + +@fixture +def patch_incremental_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(IncrementalRDStationMarketingStream, "path", "v0/example_endpoint") + mocker.patch.object(IncrementalRDStationMarketingStream, "primary_key", "test_primary_key") + mocker.patch.object(IncrementalRDStationMarketingStream, "__abstractmethods__", set()) + + +def test_cursor_field(patch_incremental_base_class): + stream = IncrementalRDStationMarketingStream(authenticator=MagicMock()) + expected_cursor_field = [] + assert stream.cursor_field == expected_cursor_field + + +def test_get_updated_state(patch_incremental_base_class, test_current_stream_state, mocker): + mocker.patch.object(IncrementalRDStationMarketingStream, "cursor_field", "updated_time") + stream = IncrementalRDStationMarketingStream(authenticator=MagicMock()) + inputs = {"current_stream_state": test_current_stream_state, "latest_record": test_current_stream_state} + expected_state = {"updated_time": "2021-10-22"} + assert stream.get_updated_state(**inputs) == expected_state + + +def test_stream_slices(patch_incremental_base_class): + stream = IncrementalRDStationMarketingStream(authenticator=MagicMock()) + inputs = 
{"sync_mode": SyncMode.incremental, "cursor_field": [], "stream_state": {}} + expected_stream_slice = [None] + assert stream.stream_slices(**inputs) == expected_stream_slice + + +def test_supports_incremental(patch_incremental_base_class, mocker): + mocker.patch.object(IncrementalRDStationMarketingStream, "cursor_field", "dummy_field") + stream = IncrementalRDStationMarketingStream(authenticator=MagicMock()) + assert stream.supports_incremental + + +def test_source_defined_cursor(patch_incremental_base_class): + stream = IncrementalRDStationMarketingStream(authenticator=MagicMock()) + assert stream.source_defined_cursor + + +def test_stream_checkpoint_interval(patch_incremental_base_class): + stream = IncrementalRDStationMarketingStream(authenticator=MagicMock()) + expected_checkpoint_interval = None + assert stream.state_checkpoint_interval == expected_checkpoint_interval diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/test_source.py b/airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/test_source.py new file mode 100644 index 000000000000..9b11721310f0 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/test_source.py @@ -0,0 +1,71 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from unittest.mock import MagicMock + +import responses +from pytest import fixture +from source_rd_station_marketing.source import SourceRDStationMarketing + + +@fixture +def test_config(): + return { + "authorization": { + "auth_type": "Client", + "client_id": "test_client_id", + "client_secret": "test_client_secret", + "refresh_token": "test_refresh_token", + }, + "start_date": "2022-01-01T00:00:00Z", + } + + +def setup_responses(): + responses.add( + responses.POST, + "https://api.rd.services/auth/token", + json={"access_token": "fake_access_token", "expires_in": 3600}, + ) + responses.add( + responses.GET, + "https://api.rd.services/platform/segmentations", + json={ + "segmentations": [ + { + "id": 71625167165, + "name": "A mock segmentation", + "standard": True, + "created_at": "2019-09-04T18:05:42.638-03:00", + "updated_at": "2019-09-04T18:05:42.638-03:00", + "process_status": "processed", + "links": [ + { + "rel": "SEGMENTATIONS.CONTACTS", + "href": "https://api.rd.services/platform/segmentations/71625167165/contacts", + "media": "application/json", + "type": "GET", + } + ], + } + ] + }, + ) + + +@responses.activate +def test_check_connection(test_config): + setup_responses() + source = SourceRDStationMarketing() + logger_mock = MagicMock() + assert source.check_connection(logger_mock, test_config) == (True, None) + + +@responses.activate +def test_streams(test_config): + setup_responses() + source = SourceRDStationMarketing() + streams = source.streams(test_config) + expected_streams_number = 11 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/test_streams.py new file mode 100644 index 000000000000..7f3199465464 --- /dev/null +++ b/airbyte-integrations/connectors/source-rd-station-marketing/unit_tests/test_streams.py @@ -0,0 +1,112 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all 
rights reserved. +# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from source_rd_station_marketing.streams import RDStationMarketingStream, Segmentations + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(RDStationMarketingStream, "primary_key", "test_primary_key") + mocker.patch.object(RDStationMarketingStream, "__abstractmethods__", set()) + + +def test_path(patch_base_class): + stream = Segmentations(authenticator=MagicMock()) + assert stream.path() == "/platform/segmentations" + + +def test_request_params(patch_base_class): + stream = RDStationMarketingStream(authenticator=MagicMock()) + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_params = {"page": 1, "page_size": 125} + assert stream.request_params(**inputs) == expected_params + + +def test_next_page_token(patch_base_class): + stream = RDStationMarketingStream(authenticator=MagicMock()) + inputs = {"response": MagicMock()} + expected_token = {"next_page": 2} + assert stream.next_page_token(**inputs) == expected_token + + +def test_parse_response(patch_base_class): + stream = RDStationMarketingStream(authenticator=MagicMock()) + response = MagicMock() + response.json.return_value = [ + { + "id": 71625167165, + "name": "A mock segmentation", + "standard": True, + "created_at": "2019-09-04T18:05:42.638-03:00", + "updated_at": "2019-09-04T18:05:42.638-03:00", + "process_status": "processed", + "links": [ + { + "rel": "SEGMENTATIONS.CONTACTS", + "href": "https://api.rd.services/platform/segmentations/71625167165/contacts", + "media": "application/json", + "type": "GET", + } + ], + } + ] + inputs = {"response": response, "stream_state": None} + expected_parsed_object = { + "id": 71625167165, + "name": "A mock segmentation", + "standard": True, + "created_at": "2019-09-04T18:05:42.638-03:00", + "updated_at": "2019-09-04T18:05:42.638-03:00", + 
"process_status": "processed", + "links": [ + { + "rel": "SEGMENTATIONS.CONTACTS", + "href": "https://api.rd.services/platform/segmentations/71625167165/contacts", + "media": "application/json", + "type": "GET", + } + ], + } + assert next(stream.parse_response(**inputs)) == expected_parsed_object + + +def test_request_headers(patch_base_class): + stream = RDStationMarketingStream(authenticator=MagicMock()) + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_headers = {} + assert stream.request_headers(**inputs) == expected_headers + + +def test_http_method(patch_base_class): + stream = RDStationMarketingStream(authenticator=MagicMock()) + expected_method = "GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = RDStationMarketingStream(authenticator=MagicMock()) + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = RDStationMarketingStream(authenticator=MagicMock()) + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/docs/integrations/README.md b/docs/integrations/README.md index 1a00d3b3bb18..29617f3f24c9 100644 --- a/docs/integrations/README.md +++ b/docs/integrations/README.md @@ -144,6 +144,7 @@ For more information about the grading system, see [Product Release Stages](http | [PrestaShop](sources/presta-shop.md) | Alpha | Yes | | [Qualaroo](sources/qualaroo.md) | Alpha | Yes | | [QuickBooks](sources/quickbooks.md) | Alpha | No | +| [RD Station Marketing](sources/rd-station-marketing.md) | Alpha | No | | 
[Recharge](sources/recharge.md) | Generally Available | Yes | | [Recurly](sources/recurly.md) | Alpha | Yes | | [Redshift](sources/redshift.md) | Alpha | Yes | diff --git a/docs/integrations/sources/rd-station-marketing.md b/docs/integrations/sources/rd-station-marketing.md new file mode 100644 index 000000000000..989d4e205499 --- /dev/null +++ b/docs/integrations/sources/rd-station-marketing.md @@ -0,0 +1,44 @@ +# RD Station Marketing + +RD Station Marketing is the leading Marketing Automation tool in Latin America. It is a software application that helps your company carry out better campaigns, nurture Leads, generate qualified business opportunities and achieve more results. From social media to email, Landing Pages, Pop-ups, even Automations and Analytics. + +## Prerequisites +* An RD Station account +* A callback URL to receive the first account credential (can be done using localhost) +* `client_id` and `client_secret` credentials. Access [this link](https://appstore.rdstation.com/en/publisher) to register a new application and start the authentication flow. + +## Airbyte Open Source +* Start Date +* Client Id +* Client Secret +* Refresh token + +## Supported sync modes + +The RD Station Marketing source connector supports the following [sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes): + - Full Refresh + - Incremental (for analytics endpoints) + +## Supported Streams + +* conversions (analytics endpoint) +* emails (analytics endpoint) +* funnel (analytics endpoint) +* workflow_emails_statistics (analytics endpoint) +* emails +* embeddables +* fields +* landing_pages +* popups +* segmentations +* workflows + +## Performance considerations + +Each endpoint has its own performance limitations, which also consider the account plan. For more informations, visit the page [API request limit](https://developers.rdstation.com/reference/limite-de-requisicoes-da-api?lng=en). 
+ +## Changelog + +| Version | Date | Pull Request | Subject | +| :------ | :--------- | :------------------------------------------------------- | :-------------------------------------------------------------- | +| 0.1.0 | 2022-10-23 | [18348](https://github.com/airbytehq/airbyte/pull/18348) | Initial Release | From d840a8a80d0da63c8758397ed45c4dba90c61ceb Mon Sep 17 00:00:00 2001 From: Taras Korenko Date: Fri, 28 Oct 2022 17:08:06 +0300 Subject: [PATCH 05/13] Simplify the OSS documentation deploy system (#2670) (#18598) + Adds Slack notification about failed Docs builds (slack notifications fall into "#oss-master-build-failure" for now) + (while here) Unbreaks docs build --- .github/workflows/deploy-docs-site.yml | 20 ++++++++++++++++--- .../supported-data-types.md | 2 +- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/.github/workflows/deploy-docs-site.yml b/.github/workflows/deploy-docs-site.yml index 0a66469b9bc5..2313bfdb5e08 100644 --- a/.github/workflows/deploy-docs-site.yml +++ b/.github/workflows/deploy-docs-site.yml @@ -13,17 +13,16 @@ on: jobs: dummy-job: name: Deploy Docs Assets + if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' runs-on: ubuntu-latest steps: - name: Check out the repository - if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' uses: actions/checkout@v3 with: fetch-depth: 0 # Node.js is needed for Yarn - name: Setup Yarn - if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' uses: actions/setup-node@v2 with: node-version: '16.14.0' @@ -31,7 +30,22 @@ jobs: cache-dependency-path: docusaurus - name: Run Docusaurus - if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: ./tools/bin/deploy_docusaurus + + - name: Notify Slack channel on failure + uses: abinoda/slack-action@master + if: failure() + env: + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN_AIRBYTE_TEAM }} + with: + # 
'C03BEADRPNY' channel => '#oss-master-build-failure' + args: >- + {\"channel\":\"C03BEADRPNY\", \"blocks\":[ + {\"type\":\"divider\"}, + {\"type\":\"section\",\"text\":{\"type\":\"mrkdwn\",\"text\":\"OSS Docs build fails on the latest master :bangbang: \n\n\"}}, + {\"type\":\"section\",\"text\":{\"type\":\"mrkdwn\",\"text\":\"_merged by_: *${{ github.actor }}* \n\"}}, + {\"type\":\"section\",\"text\":{\"type\":\"mrkdwn\",\"text\":\" :octavia-shocked: :octavia-shocked: \n\"}}, + {\"type\":\"divider\"}]} + diff --git a/docs/understanding-airbyte/supported-data-types.md b/docs/understanding-airbyte/supported-data-types.md index 861faf320dde..976173491e50 100644 --- a/docs/understanding-airbyte/supported-data-types.md +++ b/docs/understanding-airbyte/supported-data-types.md @@ -2,7 +2,7 @@ AirbyteRecords are required to conform to the Airbyte type system. This means that all sources must produce schemas and records within these types, and all destinations must handle records that conform to this type system. -Because Airbyte's interfaces are JSON-based, this type system is realized using [JSON schemas](https://json-schema.org/). In order to work around some limitations of JSON schemas, we define our own types - see [well_known_types.yaml](airbyte-protocol/protocol-models/src/main/resources/airbyte_protocol/well_known_types.yaml). Sources should use `$ref` to reference these types, rather than directly defining JsonSchema entries. +Because Airbyte's interfaces are JSON-based, this type system is realized using [JSON schemas](https://json-schema.org/). In order to work around some limitations of JSON schemas, we define our own types - see [well_known_types.yaml](https://github.com/airbytehq/airbyte/blob/111131a193359027d0081de1290eb4bb846662ef/airbyte-protocol/protocol-models/src/main/resources/airbyte_protocol/well_known_types.yaml). Sources should use `$ref` to reference these types, rather than directly defining JsonSchema entries. 
In an older version of the protocol, we relied on an `airbyte_type` property in schemas. This has been replaced by the well-known type schemas. All "old-style" types map onto well-known types. For example, a legacy connector producing a field of type `{"type": "string", "airbyte_type": "timestamp_with_timezone"}` is treated as producing `{"$ref": "WellKnownTypes.json#definitions/TimestampWithTimezone"}`. From b71ec081715ed1d57a76515b279c9533f2ce81ae Mon Sep 17 00:00:00 2001 From: Oyindamola Olatunji <36523905+ikeadeoyin@users.noreply.github.com> Date: Fri, 28 Oct 2022 15:48:25 +0100 Subject: [PATCH 06/13] improved the digitalocean deployment guide (#18566) --- .../on-digitalocean-droplet.md | 101 +++++++----------- 1 file changed, 36 insertions(+), 65 deletions(-) diff --git a/docs/deploying-airbyte/on-digitalocean-droplet.md b/docs/deploying-airbyte/on-digitalocean-droplet.md index 6db2bcd78d37..99f0d75a2df6 100644 --- a/docs/deploying-airbyte/on-digitalocean-droplet.md +++ b/docs/deploying-airbyte/on-digitalocean-droplet.md @@ -1,86 +1,57 @@ -# On DigitalOcean (Droplet) +# Deploy on DigitalOcean -The instructions have been tested on `DigitalOcean Droplet ($5)`. +This page guides you through deploying Airbyte Open Source on a DigitalOcean droplet by setting up the deployment environment, and installing and starting Airbyte. -Alternatively, you can one-click deploy Airbyte to DigitalOcean using their marketplace:
-Deploy to DigitalOcean +Alternatively, you can deploy Airbyte on DigitalOcean in one click using their [marketplace](https://cloud.digitalocean.com/droplets/new?onboarding_origin=marketplace&appId=95451155&image=airbyte&utm_source=deploying-airbyte_on-digitalocean-droplet). -## Create a new droplet +## Requirements -* Launch a new droplet +- To test Airbyte, we recommend a $20/month droplet. +- To deploy Airbyte in a production environment, we recommend a $40/month instance. -![](../.gitbook/assets/digitalocean_launch_droplet.png) +## Set up the Environment -* Select image distribution +To deploy Airbyte Open Source on DigitalOcean: -![](../.gitbook/assets/dg_choose_ditribution.png) +1. [Create a DigitalOcean droplet](https://docs.digitalocean.com/products/droplets/how-to/create/). +2. Connect to the droplet using the [Droplet Console](https://www.google.com/url?q=https://docs.digitalocean.com/products/droplets/how-to/connect-with-console/&sa=D&source=docs&ust=1666280581103312&usg=AOvVaw1hyEPyjRsmsRdIgbxZdu6F). +3. To update the available packages and install Docker, run the following command: -* Select droplet type - * For testing out Airbyte, a `$20/month` droplet is likely sufficient. - * For long-running Airbyte installations, we recommend a `$40/month` instance. + ```bash + sudo apt update + sudo apt install apt-transport-https ca-certificates curl software-properties-common + curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add - + sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable" + sudo apt install docker-ce + sudo systemctl status docker + sudo usermod -aG docker ${USER} + su - ${USER} + ``` -![](../.gitbook/assets/dg_droplet_type.png) - -* `Region` - * Generally, choose a datacenter close to you. -* `Authentication` - * Password -* `Create Droplet` - -![](../.gitbook/assets/dg_droplet_creating.png) - -* Wait for the droplet to enter the `Running` state. 
- -## Install environment - -Note: The following commands will be entered either on your local terminal or in your ssh session on the instance terminal. The comments above each command block will indicate where to enter the commands. - -* Connect to your instance - -* `Click on Console` - -![](../.gitbook/assets/dg_console.png) - -* Install `docker` +4. To install Docker-Compose, run the following command: ```bash -# In your ssh session on the instance terminal -sudo apt update -sudo apt install apt-transport-https ca-certificates curl software-properties-common -curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add - -sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable" -sudo apt install docker-ce -sudo systemctl status docker -sudo usermod -aG docker ${USER} -su - ${USER} + sudo wget https://github.com/docker/compose/releases/download/1.26.2/docker-compose-$(uname -s)-$(uname -m) -O /usr/local/bin/docker-compose + sudo chmod +x /usr/local/bin/docker-compose + docker-compose --version ``` -* Install `docker-compose` +## Install Airbyte -```bash -# In your ssh session on the instance terminal -sudo wget https://github.com/docker/compose/releases/download/1.26.2/docker-compose-$(uname -s)-$(uname -m) -O /usr/local/bin/docker-compose -sudo chmod +x /usr/local/bin/docker-compose -docker-compose --version -``` -## Install & start Airbyte - -* Connect to your instance - -* `Click on Console` +To install and start Airbyte : -![](../.gitbook/assets/dg_console.png) - -* Install Airbyte +1. Run the following command: ```bash -# In your ssh session on the instance terminal -mkdir airbyte && cd airbyte -wget https://raw.githubusercontent.com/airbytehq/airbyte/master/{.env,docker-compose.yaml} -docker-compose up -d + mkdir airbyte && cd airbyte + wget https://raw.githubusercontent.com/airbytehq/airbyte/master/{.env,docker-compose.yaml} + docker-compose up -d ``` -## Troubleshooting +2. 
Verify that Airbyte is running: -If you encounter any issues, just connect to our [Slack](https://slack.airbyte.io). Our community will help! We also have a [FAQ](../troubleshooting/on-deploying.md) section in our docs for common problems. + #todo -add the ssh tunnel command + +## Troubleshooting +If you encounter any issues, reach out to our community on [Slack](https://slack.airbyte.com/). From 67c078dc594ed758eb491c9874eb639e21d4f22f Mon Sep 17 00:00:00 2001 From: Vladimir Date: Fri, 28 Oct 2022 18:16:51 +0300 Subject: [PATCH 07/13] fixed failed build (#18610) --- .../src/components/StreamTestingPanel/StreamSelector.tsx | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/airbyte-webapp/src/components/StreamTestingPanel/StreamSelector.tsx b/airbyte-webapp/src/components/StreamTestingPanel/StreamSelector.tsx index f2b38910c15a..578d29940b2d 100644 --- a/airbyte-webapp/src/components/StreamTestingPanel/StreamSelector.tsx +++ b/airbyte-webapp/src/components/StreamTestingPanel/StreamSelector.tsx @@ -7,6 +7,7 @@ import { Text } from "components/ui/Text"; import { useConnectorBuilderState } from "services/connectorBuilder/ConnectorBuilderStateService"; +import { Heading } from "../ui/Heading"; import styles from "./StreamSelector.module.scss"; export const StreamSelector: React.FC = () => { @@ -15,9 +16,9 @@ export const StreamSelector: React.FC = () => { return ( - + {selectedStream.name} - + {/* wrap in div to make `position: absolute` on Listbox.Options result in correct vertical positioning */} From 87663726649f6046d0e6c55743d3190095269d88 Mon Sep 17 00:00:00 2001 From: Dainius Salkauskas Date: Fri, 28 Oct 2022 19:20:05 +0300 Subject: [PATCH 08/13] =?UTF-8?q?=F0=9F=8E=89=20New=20Source:=20ConvertKit?= =?UTF-8?q?=20(#18455)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Init source omnisend * Removed unnecessary files * Init source convertkit * Added forms, sequences streams * Added tags stream * 
Added subscribers, broadcasts streams * Added documentation * Removed unnecessary files * Updated pull request information in documentation * Added sample config * Updated invalid config * Formatting, removed abnormal state, uncommented full_refresh acceptance test * Added pagination for subscribers stream * fix: add source definition for convertkit * auto-bump connector version Co-authored-by: Sajarin Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 7 ++ .../src/main/resources/seed/source_specs.yaml | 18 ++++ .../source-convertkit/.dockerignore | 6 ++ .../connectors/source-convertkit/Dockerfile | 38 ++++++++ .../connectors/source-convertkit/README.md | 79 +++++++++++++++++ .../connectors/source-convertkit/__init__.py | 3 + .../acceptance-test-config.yml | 30 +++++++ .../acceptance-test-docker.sh | 16 ++++ .../connectors/source-convertkit/build.gradle | 9 ++ .../integration_tests/__init__.py | 3 + .../integration_tests/acceptance.py | 16 ++++ .../integration_tests/configured_catalog.json | 49 +++++++++++ .../integration_tests/invalid_config.json | 3 + .../integration_tests/sample_config.json | 3 + .../integration_tests/sample_state.json | 5 ++ .../connectors/source-convertkit/main.py | 13 +++ .../source-convertkit/requirements.txt | 2 + .../connectors/source-convertkit/setup.py | 29 +++++++ .../source_convertkit/__init__.py | 8 ++ .../source_convertkit/convertkit.yaml | 87 +++++++++++++++++++ .../source_convertkit/schemas/broadcasts.json | 23 +++++ .../source_convertkit/schemas/forms.json | 83 ++++++++++++++++++ .../source_convertkit/schemas/sequences.json | 35 ++++++++ .../schemas/subscribers.json | 49 +++++++++++ .../source_convertkit/schemas/tags.json | 23 +++++ .../source_convertkit/source.py | 18 ++++ .../source_convertkit/spec.yaml | 13 +++ docs/integrations/README.md | 1 + docs/integrations/sources/convertkit.md | 36 ++++++++ 29 files changed, 705 insertions(+) create mode 100644 
airbyte-integrations/connectors/source-convertkit/.dockerignore create mode 100644 airbyte-integrations/connectors/source-convertkit/Dockerfile create mode 100644 airbyte-integrations/connectors/source-convertkit/README.md create mode 100644 airbyte-integrations/connectors/source-convertkit/__init__.py create mode 100644 airbyte-integrations/connectors/source-convertkit/acceptance-test-config.yml create mode 100644 airbyte-integrations/connectors/source-convertkit/acceptance-test-docker.sh create mode 100644 airbyte-integrations/connectors/source-convertkit/build.gradle create mode 100644 airbyte-integrations/connectors/source-convertkit/integration_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-convertkit/integration_tests/acceptance.py create mode 100644 airbyte-integrations/connectors/source-convertkit/integration_tests/configured_catalog.json create mode 100644 airbyte-integrations/connectors/source-convertkit/integration_tests/invalid_config.json create mode 100644 airbyte-integrations/connectors/source-convertkit/integration_tests/sample_config.json create mode 100644 airbyte-integrations/connectors/source-convertkit/integration_tests/sample_state.json create mode 100644 airbyte-integrations/connectors/source-convertkit/main.py create mode 100644 airbyte-integrations/connectors/source-convertkit/requirements.txt create mode 100644 airbyte-integrations/connectors/source-convertkit/setup.py create mode 100644 airbyte-integrations/connectors/source-convertkit/source_convertkit/__init__.py create mode 100644 airbyte-integrations/connectors/source-convertkit/source_convertkit/convertkit.yaml create mode 100644 airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/broadcasts.json create mode 100644 airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/forms.json create mode 100644 airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/sequences.json create mode 100644 
airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/subscribers.json create mode 100644 airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/tags.json create mode 100644 airbyte-integrations/connectors/source-convertkit/source_convertkit/source.py create mode 100644 airbyte-integrations/connectors/source-convertkit/source_convertkit/spec.yaml create mode 100644 docs/integrations/sources/convertkit.md diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index d46a43b8d187..b4849beffecc 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -241,6 +241,13 @@ icon: confluence.svg sourceType: api releaseStage: alpha +- name: ConvertKit + sourceDefinitionId: be9ee02f-6efe-4970-979b-95f797a37188 + dockerRepository: airbyte/source-convertkit + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/convertkit + sourceType: api + releaseStage: alpha - name: Courier sourceDefinitionId: 0541b2cd-2367-4986-b5f1-b79ff55439e4 dockerRepository: airbyte/source-courier diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 4224bc7f2d54..a330eb401404 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -2365,6 +2365,24 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-convertkit:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/convertkit" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Convertkit Spec" + type: "object" + required: + - "api_secret" + additionalProperties: true + 
properties: + api_secret: + type: "string" + description: "API Secret" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-courier:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/courier" diff --git a/airbyte-integrations/connectors/source-convertkit/.dockerignore b/airbyte-integrations/connectors/source-convertkit/.dockerignore new file mode 100644 index 000000000000..7239fffd0a29 --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_convertkit +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-convertkit/Dockerfile b/airbyte-integrations/connectors/source-convertkit/Dockerfile new file mode 100644 index 000000000000..39466118d645 --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. 
+RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_convertkit ./source_convertkit + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-convertkit diff --git a/airbyte-integrations/connectors/source-convertkit/README.md b/airbyte-integrations/connectors/source-convertkit/README.md new file mode 100644 index 000000000000..e616271b56ca --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/README.md @@ -0,0 +1,79 @@ +# Convertkit Source + +This is the repository for the Convertkit configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/convertkit). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-convertkit:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/convertkit) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_convertkit/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source convertkit test creds` +and place them into `secrets/config.json`. 
+ +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-convertkit:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-convertkit:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-convertkit:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-convertkit:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-convertkit:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-convertkit:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-convertkit:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-convertkit:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. 
The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-convertkit/__init__.py b/airbyte-integrations/connectors/source-convertkit/__init__.py new file mode 100644 index 000000000000..1100c1c58cf5 --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-convertkit/acceptance-test-config.yml b/airbyte-integrations/connectors/source-convertkit/acceptance-test-config.yml new file mode 100644 index 000000000000..fa0f4391dccd --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/acceptance-test-config.yml @@ -0,0 +1,30 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-convertkit:dev +tests: + spec: + - spec_path: "source_convertkit/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file + # expect_records: + # path: "integration_tests/expected_records.txt" + # extra_fields: no + # exact_order: no + # extra_records: yes + # incremental: # TODO if your connector does not implement incremental sync, remove this block + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog.json" + # future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-convertkit/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-convertkit/acceptance-test-docker.sh new file mode 100644 index 000000000000..c51577d10690 --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/acceptance-test-docker.sh @@ -0,0 +1,16 @@ 
+#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-convertkit/build.gradle b/airbyte-integrations/connectors/source-convertkit/build.gradle new file mode 100644 index 000000000000..7e7db2e9e9c9 --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_convertkit' +} diff --git a/airbyte-integrations/connectors/source-convertkit/integration_tests/__init__.py b/airbyte-integrations/connectors/source-convertkit/integration_tests/__init__.py new file mode 100644 index 000000000000..1100c1c58cf5 --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-convertkit/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-convertkit/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-convertkit/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-convertkit/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..8bb19362a6f8 --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/integration_tests/configured_catalog.json @@ -0,0 +1,49 @@ +{ + "streams": [ + { + "stream": { + "name": "forms", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "sequences", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "tags", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "subscribers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "broadcasts", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-convertkit/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-convertkit/integration_tests/invalid_config.json new file mode 100644 index 000000000000..96e67fbca8a2 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-convertkit/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "api_secret": "" +} diff --git a/airbyte-integrations/connectors/source-convertkit/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-convertkit/integration_tests/sample_config.json new file mode 100644 index 000000000000..434c2977d8ba --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "api_secret": "" +} diff --git a/airbyte-integrations/connectors/source-convertkit/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-convertkit/integration_tests/sample_state.json new file mode 100644 index 000000000000..3587e579822d --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-convertkit/main.py b/airbyte-integrations/connectors/source-convertkit/main.py new file mode 100644 index 000000000000..0bf30250344d --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_convertkit import SourceConvertkit + +if __name__ == "__main__": + source = SourceConvertkit() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-convertkit/requirements.txt b/airbyte-integrations/connectors/source-convertkit/requirements.txt new file mode 100644 index 000000000000..0411042aa091 --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
diff --git a/airbyte-integrations/connectors/source-convertkit/setup.py b/airbyte-integrations/connectors/source-convertkit/setup.py new file mode 100644 index 000000000000..46a7c93718ad --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_convertkit", + description="Source implementation for Convertkit.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/__init__.py b/airbyte-integrations/connectors/source-convertkit/source_convertkit/__init__.py new file mode 100644 index 000000000000..668ce26356fd --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceConvertkit + +__all__ = ["SourceConvertkit"] diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/convertkit.yaml b/airbyte-integrations/connectors/source-convertkit/source_convertkit/convertkit.yaml new file mode 100644 index 000000000000..e3c90fbb0e80 --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/convertkit.yaml @@ -0,0 +1,87 @@ +version: "0.1.0" +definitions: + selector: + extractor: + field_pointer: [ "{{ options['name'] }}" ] + requester: + # API Docs: https://developers.convertkit.com/#overview + url_base: "https://api.convertkit.com/v3" + http_method: "GET" + # API Docs: https://developers.convertkit.com/#api-basics + request_options_provider: + request_parameters: + api_secret: "{{ config['api_secret'] }}" + increment_paginator: + type: DefaultPaginator + url_base: "*ref(definitions.requester.url_base)" + page_size_option: + inject_into: "request_parameter" + field_name: "limit" + pagination_strategy: + type: PageIncrement + page_size: 50 + page_token_option: + inject_into: "request_parameter" + field_name: "page" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + # API Docs: https://developers.convertkit.com/#forms + forms_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "forms" + primary_key: "id" + path: "/forms" + # API Docs: https://developers.convertkit.com/#sequences + sequences_stream: + $ref: "*ref(definitions.base_stream)" + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + extractor: + field_pointer: [ "courses" ] + $options: + name: "sequences" + primary_key: "id" + path: "/sequences" + # API Docs: https://developers.convertkit.com/#tags + tags_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "tags" + primary_key: "id" 
+ path: "/tags" + retriever: + $ref: "*ref(definitions.retriever)" + paginator: + $ref: "*ref(definitions.increment_paginator)" + # API Docs: https://developers.convertkit.com/#subscribers + subscribers_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "subscribers" + primary_key: "id" + path: "/subscribers" + # API Docs: https://developers.convertkit.com/#broadcasts + broadcasts_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "broadcasts" + primary_key: "id" + path: "/broadcasts" +streams: + - "*ref(definitions.forms_stream)" + - "*ref(definitions.sequences_stream)" + - "*ref(definitions.tags_stream)" + - "*ref(definitions.subscribers_stream)" + - "*ref(definitions.broadcasts_stream)" +check: + stream_names: ["forms"] diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/broadcasts.json b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/broadcasts.json new file mode 100644 index 000000000000..2d9c6ef73ebd --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/broadcasts.json @@ -0,0 +1,23 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "created_at": { + "type": [ + "string", + "null" + ] + }, + "subject": { + "type": [ + "string", + "null" + ] + } + }, + "required": [ + "id" + ] +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/forms.json b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/forms.json new file mode 100644 index 000000000000..95519975d677 --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/forms.json @@ -0,0 +1,83 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "uid": { + "type": [ + "string", + "null" + ] + }, + "name": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": [ + "string", + 
"null" + ] + }, + "archived": { + "type": [ + "boolean", + "null" + ] + }, + "type": { + "type": [ + "string", + "null" + ] + }, + "url": { + "type": [ + "string", + "null" + ] + }, + "embed_js": { + "type": [ + "string", + "null" + ] + }, + "embed_url": { + "type": [ + "string", + "null" + ] + }, + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "sign_up_button_text": { + "type": [ + "string", + "null" + ] + }, + "success_message": { + "type": [ + "string", + "null" + ] + } + }, + "required": [ + "id" + ] +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/sequences.json b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/sequences.json new file mode 100644 index 000000000000..3767adac7c42 --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/sequences.json @@ -0,0 +1,35 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": [ + "string", + "null" + ] + }, + "hold": { + "type": [ + "boolean", + "null" + ] + }, + "repeat": { + "type": [ + "boolean", + "null" + ] + }, + "created_at": { + "type": [ + "string", + "null" + ] + } + }, + "required": [ + "id" + ] +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/subscribers.json b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/subscribers.json new file mode 100644 index 000000000000..8e94599a7ed5 --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/subscribers.json @@ -0,0 +1,49 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "first_name": { + "type": [ + "string", + "null" + ] + }, + "email_address": { + "type": [ + "string", + "null" + ] + }, + "state": { + "type": [ + "string", + "null" + ] + }, + 
"created_at": { + "type": [ + "string", + "null" + ] + }, + "fields": { + "type": [ + "object", + "null" + ], + "properties": { + "last_name": { + "type": [ + "string", + "null" + ] + } + } + } + }, + "required": [ + "id" + ] +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/tags.json b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/tags.json new file mode 100644 index 000000000000..66ccdc68f34e --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/tags.json @@ -0,0 +1,23 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": [ + "string", + "null" + ] + }, + "created_at": { + "type": [ + "string", + "null" + ] + } + }, + "required": [ + "id" + ] +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/source.py b/airbyte-integrations/connectors/source-convertkit/source_convertkit/source.py new file mode 100644 index 000000000000..6fd59d24c59f --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourceConvertkit(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "convertkit.yaml"}) diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/spec.yaml b/airbyte-integrations/connectors/source-convertkit/source_convertkit/spec.yaml new file mode 100644 index 000000000000..a03e1252fb8a --- /dev/null +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/spec.yaml @@ -0,0 +1,13 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/convertkit +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Convertkit Spec + type: object + required: + - api_secret + additionalProperties: true + properties: + api_secret: + type: string + description: API Secret + airbyte_secret: true diff --git a/docs/integrations/README.md b/docs/integrations/README.md index 29617f3f24c9..1cff87839588 100644 --- a/docs/integrations/README.md +++ b/docs/integrations/README.md @@ -46,6 +46,7 @@ For more information about the grading system, see [Product Release Stages](http | [CockroachDB](sources/cockroachdb.md) | Alpha | No | | [Commercetools](sources/commercetools.md) | Alpha | No | | [Confluence](sources/confluence.md) | Alpha | No | +| [ConvertKit](sources/convertkit.md) | Alpha | No | | [Courier](sources/courier.md) | Alpha | No | | [Customer.io](sources/customer-io.md) | Alpha | No | | [Db2](sources/db2.md) | Alpha | No | diff --git a/docs/integrations/sources/convertkit.md b/docs/integrations/sources/convertkit.md new file mode 100644 index 000000000000..63d3cba8cd71 --- /dev/null +++ b/docs/integrations/sources/convertkit.md @@ -0,0 +1,36 @@ +# ConvertKit + +## Sync overview + +This source can sync data from the [ConvertKit API](https://developers.convertkit.com/#getting-started). 
At present this connector only supports full refresh syncs meaning that each time you use the connector it will sync all available records from scratch. Please use cautiously if you expect your API to have a lot of records. + +## This Source Supports the Following Streams + +* sequences +* subscribers +* broadcasts +* tags +* forms + +### Features + +| Feature | Supported?\(Yes/No\) | Notes | +| :--- | :--- | :--- | +| Full Refresh Sync | Yes | | +| Incremental Sync | No | | + +### Performance considerations + +The connector has a rate limit of no more than 120 requests over a rolling 60 second period, for a given api secret. + +## Getting started + +### Requirements + +* ConvertKit API Secret + +## Changelog + +| Version | Date | Pull Request | Subject | +|:--------|:-----------| :----------- |:-----------------------------------------------------------| +| 0.1.0 | 2022-10-25 | [18455](https://github.com/airbytehq/airbyte/pull/18455) | Initial commit | \ No newline at end of file From 3036ddae5aa66b3ad60efba8108e14b964700735 Mon Sep 17 00:00:00 2001 From: Balasubramanian T K Date: Fri, 28 Oct 2022 21:50:47 +0530 Subject: [PATCH 09/13] =?UTF-8?q?=F0=9F=8E=89New=20Source:=20Google=20Webf?= =?UTF-8?q?onts=20[low-code=20cdk]=20(#18496)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * New source: Google webfonts * chore: Add Docs * chore: update changelog * chore: resolved given comments for PR * chore: unwanted files removed * fix: generate and add source definitions Co-authored-by: sajarin --- .../resources/seed/source_definitions.yaml | 8 ++ .../src/main/resources/seed/source_specs.yaml | 28 +++++ .../source-google-webfonts/.dockerignore | 6 + .../source-google-webfonts/Dockerfile | 38 ++++++ .../source-google-webfonts/README.md | 103 ++++++++++++++++ .../source-google-webfonts/__init__.py | 3 + .../acceptance-test-config.yml | 20 ++++ .../acceptance-test-docker.sh | 16 +++ .../source-google-webfonts/bootstrap.md | 38 
++++++ .../source-google-webfonts/build.gradle | 9 ++ .../integration_tests/__init__.py | 3 + .../integration_tests/acceptance.py | 16 +++ .../integration_tests/catalog.json | 39 ++++++ .../integration_tests/configured_catalog.json | 13 ++ .../integration_tests/invalid_config.json | 6 + .../integration_tests/sample_config.json | 6 + .../connectors/source-google-webfonts/main.py | 13 ++ .../source-google-webfonts/requirements.txt | 2 + .../source-google-webfonts/setup.py | 29 +++++ .../source_google_webfonts/__init__.py | 8 ++ .../google_webfonts.yaml | 38 ++++++ .../source_google_webfonts/schemas/fonts.json | 111 ++++++++++++++++++ .../source_google_webfonts/source.py | 18 +++ .../source_google_webfonts/spec.yaml | 22 ++++ docs/integrations/README.md | 1 + docs/integrations/sources/google-webfonts.md | 68 +++++++++++ 26 files changed, 662 insertions(+) create mode 100644 airbyte-integrations/connectors/source-google-webfonts/.dockerignore create mode 100644 airbyte-integrations/connectors/source-google-webfonts/Dockerfile create mode 100644 airbyte-integrations/connectors/source-google-webfonts/README.md create mode 100644 airbyte-integrations/connectors/source-google-webfonts/__init__.py create mode 100644 airbyte-integrations/connectors/source-google-webfonts/acceptance-test-config.yml create mode 100644 airbyte-integrations/connectors/source-google-webfonts/acceptance-test-docker.sh create mode 100644 airbyte-integrations/connectors/source-google-webfonts/bootstrap.md create mode 100644 airbyte-integrations/connectors/source-google-webfonts/build.gradle create mode 100644 airbyte-integrations/connectors/source-google-webfonts/integration_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-google-webfonts/integration_tests/acceptance.py create mode 100644 airbyte-integrations/connectors/source-google-webfonts/integration_tests/catalog.json create mode 100644 
airbyte-integrations/connectors/source-google-webfonts/integration_tests/configured_catalog.json create mode 100644 airbyte-integrations/connectors/source-google-webfonts/integration_tests/invalid_config.json create mode 100644 airbyte-integrations/connectors/source-google-webfonts/integration_tests/sample_config.json create mode 100644 airbyte-integrations/connectors/source-google-webfonts/main.py create mode 100644 airbyte-integrations/connectors/source-google-webfonts/requirements.txt create mode 100644 airbyte-integrations/connectors/source-google-webfonts/setup.py create mode 100644 airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/__init__.py create mode 100644 airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/google_webfonts.yaml create mode 100644 airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/schemas/fonts.json create mode 100644 airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/source.py create mode 100644 airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/spec.yaml create mode 100644 docs/integrations/sources/google-webfonts.md diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index b4849beffecc..37d226008ecd 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -480,6 +480,14 @@ icon: google-sheets.svg sourceType: file releaseStage: generally_available +- name: Google Webfonts + sourceDefinitionId: a68fbcde-b465-4ab3-b2a6-b0590a875835 + dockerRepository: airbyte/source-google-webfonts + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/google-webfonts + icon: googleworkpace.svg + sourceType: api + releaseStage: alpha - name: Google Workspace Admin Reports sourceDefinitionId: 
ed9dfefa-1bbc-419d-8c5e-4d78f0ef6734 dockerRepository: airbyte/source-google-workspace-admin-reports diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index a330eb401404..28e09f19d102 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -4763,6 +4763,34 @@ - - "client_secret" oauthFlowOutputParameters: - - "refresh_token" +- dockerImage: "airbyte/source-google-webfonts:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/google-webfonts" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Google Webfonts Spec" + type: "object" + required: + - "api_key" + additionalProperties: true + properties: + api_key: + type: "string" + description: "API key is required to access google apis, For getting your's\ + \ goto google console and generate api key for Webfonts" + airbyte_secret: true + sort: + type: "string" + description: "Optional, to find how to sort" + prettyPrint: + type: "string" + description: "Optional, boolean type" + alt: + type: "string" + description: "Optional, Available params- json, media, proto" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-google-workspace-admin-reports:0.1.8" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/google-workspace-admin-reports" diff --git a/airbyte-integrations/connectors/source-google-webfonts/.dockerignore b/airbyte-integrations/connectors/source-google-webfonts/.dockerignore new file mode 100644 index 000000000000..b7c1ebe6c666 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_google_webfonts +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-google-webfonts/Dockerfile 
b/airbyte-integrations/connectors/source-google-webfonts/Dockerfile new file mode 100644 index 000000000000..56b13b3f8de2 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_google_webfonts ./source_google_webfonts + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-google-webfonts diff --git a/airbyte-integrations/connectors/source-google-webfonts/README.md b/airbyte-integrations/connectors/source-google-webfonts/README.md new file mode 100644 index 000000000000..f0e9d4f2ba72 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/README.md @@ -0,0 +1,103 @@ +# Google Webfonts Source + +This is the repository for the Google Webfonts configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/google-webfonts). 
+ +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-google-webfonts:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/google-webfonts) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_google_webfonts/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. 
+ +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source google-webfonts test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-google-webfonts:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-google-webfonts:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-google-webfonts:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-google-webfonts:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-google-webfonts:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-google-webfonts:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. 
+To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-google-webfonts:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-google-webfonts:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-google-webfonts/__init__.py b/airbyte-integrations/connectors/source-google-webfonts/__init__.py new file mode 100644 index 000000000000..1100c1c58cf5 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-google-webfonts/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-webfonts/acceptance-test-config.yml new file mode 100644 index 000000000000..7e69e954eac4 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/acceptance-test-config.yml @@ -0,0 +1,20 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-google-webfonts:dev +tests: + spec: + - spec_path: "source_google_webfonts/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-google-webfonts/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-google-webfonts/acceptance-test-docker.sh new file mode 100644 index 000000000000..c51577d10690 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-google-webfonts/bootstrap.md b/airbyte-integrations/connectors/source-google-webfonts/bootstrap.md new file mode 100644 index 000000000000..10cfa3b880b5 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/bootstrap.md @@ -0,0 +1,38 @@ +# Google-webfonts + +The connector uses the v1 API documented here: https://developers.google.com/fonts/docs/developer_api . It is +straightforward HTTP REST API with API authentication. + +## API key + +Api key is mandate for this connector to work, It could be generated by a gmail account for free at https://console.cloud.google.com/apis/dashboard. +Just pass the generated API key and optional parameters for establishing the connection. Example:123 + +## Implementation details + +## Setup guide + +### Step 1: Set up Google-webfonts connection + +- Generate an API key (Example: 12345) +- Params (If specific info is needed) +- Available params + - sort: SORT_UNDEFINED, ALPHA, DATE, STYLE, TRENDING, POPULARITY + - alt: json, media or proto + - prettyPrint: boolean + +## Step 2: Generate schema for the endpoint + +### Custom schema is generated and tested with different IDs + +## Step 3: Spec, Secrets, and connector yaml files are configured with reference to the Airbyte documentation. + +## In a nutshell: + +1. Navigate to the Airbyte Open Source dashboard. +2. Set the name for your source. +3. Enter your `api_key`. +5. Enter your config params if needed. (Optional) +6. Click **Set up source**. 
+ + * We use only GET methods, towards the webfonts endpoints which is straightforward \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-google-webfonts/build.gradle b/airbyte-integrations/connectors/source-google-webfonts/build.gradle new file mode 100644 index 000000000000..8bf7fd748729 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_google_webfonts' +} diff --git a/airbyte-integrations/connectors/source-google-webfonts/integration_tests/__init__.py b/airbyte-integrations/connectors/source-google-webfonts/integration_tests/__init__.py new file mode 100644 index 000000000000..1100c1c58cf5 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-google-webfonts/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-google-webfonts/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-google-webfonts/integration_tests/catalog.json b/airbyte-integrations/connectors/source-google-webfonts/integration_tests/catalog.json new file mode 100644 index 000000000000..6799946a6851 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/integration_tests/catalog.json @@ -0,0 +1,39 @@ +{ + "streams": [ + { + "name": "TODO fix this file", + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": "column1", + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "column1": { + "type": "string" + }, + "column2": { + "type": "number" + } + } + } + }, + { + "name": "table1", + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": false, + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "column1": { + "type": "string" + }, + "column2": { + "type": "number" + } + } + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-google-webfonts/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-google-webfonts/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..2875f8637421 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/integration_tests/configured_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "fonts", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-google-webfonts/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-google-webfonts/integration_tests/invalid_config.json new file mode 100644 index 
000000000000..316264c50d74 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/integration_tests/invalid_config.json @@ -0,0 +1,6 @@ +{ + "api_key": "", + "sort": "", + "prettyPrint": "", + "alt": "" +} diff --git a/airbyte-integrations/connectors/source-google-webfonts/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-google-webfonts/integration_tests/sample_config.json new file mode 100644 index 000000000000..93b02698274d --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/integration_tests/sample_config.json @@ -0,0 +1,6 @@ +{ + "api_key": "", + "sort": "SORT_UNDEFINED", + "prettyPrint": "true", + "alt": "json" +} diff --git a/airbyte-integrations/connectors/source-google-webfonts/main.py b/airbyte-integrations/connectors/source-google-webfonts/main.py new file mode 100644 index 000000000000..065faa3891bf --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_google_webfonts import SourceGoogleWebfonts + +if __name__ == "__main__": + source = SourceGoogleWebfonts() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-google-webfonts/requirements.txt b/airbyte-integrations/connectors/source-google-webfonts/requirements.txt new file mode 100644 index 000000000000..0411042aa091 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-google-webfonts/setup.py b/airbyte-integrations/connectors/source-google-webfonts/setup.py new file mode 100644 index 000000000000..f290fc2148d4 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_google_webfonts", + description="Source implementation for Google Webfonts.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/__init__.py b/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/__init__.py new file mode 100644 index 000000000000..4045cfdf0228 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceGoogleWebfonts + +__all__ = ["SourceGoogleWebfonts"] diff --git a/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/google_webfonts.yaml b/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/google_webfonts.yaml new file mode 100644 index 000000000000..61310dc2ac11 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/google_webfonts.yaml @@ -0,0 +1,38 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["items"] + requester: + url_base: "https://webfonts.googleapis.com/v1" + http_method: "GET" + authenticator: + type: ApiKeyAuthenticator + header: "apikey" + api_token: "{{ config['api_key'] }}" + + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + + fonts_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "fonts" + path: "/webfonts?key={{ config['api_key'] }}&sort={{ config['sort'] or 'SORT_UNDEFINED'}}&prettyPrint={{ config['prettyPrint'] or 'true'}}&alt={{ config['alt'] or 'json'}}" + +streams: + - "*ref(definitions.fonts_stream)" + +check: + stream_names: + - "fonts" diff --git a/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/schemas/fonts.json b/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/schemas/fonts.json new file mode 100644 index 000000000000..9c62b02dc329 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/schemas/fonts.json @@ -0,0 +1,111 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666796406.json", + "title": "Root", + "type": "object", + "properties": { + "kind": { + "$id": "#root/kind", + "title": "Kind", + "type": "string", + 
"default": "", + "pattern": "^.*$" + }, + "items": { + "$id": "#root/items", + "title": "Items", + "type": "array", + "default": [], + "items": { + "$id": "#root/items/items", + "title": "Items", + "type": "object", + "properties": { + "family": { + "$id": "#root/items/items/family", + "title": "Family", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "variants": { + "$id": "#root/items/items/variants", + "title": "Variants", + "type": "array", + "default": [], + "items": { + "$id": "#root/items/items/variants/items", + "title": "Items", + "type": "string", + "default": "", + "pattern": "^.*$" + } + }, + "subsets": { + "$id": "#root/items/items/subsets", + "title": "Subsets", + "type": "array", + "default": [], + "items": { + "$id": "#root/items/items/subsets/items", + "title": "Items", + "type": "string", + "default": "", + "pattern": "^.*$" + } + }, + "version": { + "$id": "#root/items/items/version", + "title": "Version", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "lastModified": { + "$id": "#root/items/items/lastModified", + "title": "Lastmodified", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "files": { + "$id": "#root/items/items/files", + "title": "Files", + "type": "object", + "properties": { + "regular": { + "$id": "#root/items/items/files/regular", + "title": "Regular", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "italic": { + "$id": "#root/items/items/files/italic", + "title": "Italic", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + }, + "category": { + "$id": "#root/items/items/category", + "title": "Category", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "kind": { + "$id": "#root/items/items/kind", + "title": "Kind", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/source.py 
b/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/source.py new file mode 100644 index 000000000000..353c6e585164 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceGoogleWebfonts(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "google_webfonts.yaml"}) diff --git a/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/spec.yaml b/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/spec.yaml new file mode 100644 index 000000000000..ebc80b05ad5a --- /dev/null +++ b/airbyte-integrations/connectors/source-google-webfonts/source_google_webfonts/spec.yaml @@ -0,0 +1,22 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/google-webfonts +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Google Webfonts Spec + type: object + required: + - api_key + additionalProperties: true + properties: + api_key: + type: string + description: API key is required to access google apis, For getting your's goto google console and generate api key for Webfonts + airbyte_secret: true + sort: + type: string + description: Optional, to find how to sort + prettyPrint: + type: string + description: Optional, boolean type + alt: + type: string + description: Optional, Available params- json, media, proto diff --git a/docs/integrations/README.md b/docs/integrations/README.md index 1cff87839588..afaa4b6ecbec 100644 --- a/docs/integrations/README.md +++ b/docs/integrations/README.md @@ -77,6 
+77,7 @@ For more information about the grading system, see [Product Release Stages](http | [Google Directory](sources/google-directory.md) | Alpha | Yes | | [Google Search Console](sources/google-search-console.md) | Generally Available | Yes | | [Google Sheets](sources/google-sheets.md) | Generally Available | Yes | +| [Google Webfonts](sources/google-webfonts.md) | Alpha | Yes | | [Google Workspace Admin Reports](sources/google-workspace-admin-reports.md) | Alpha | Yes | | [Greenhouse](sources/greenhouse.md) | Beta | Yes | | [Gutendex](sources/gutendex.md) | Alpha | No | diff --git a/docs/integrations/sources/google-webfonts.md b/docs/integrations/sources/google-webfonts.md new file mode 100644 index 000000000000..b3a5e69ab01e --- /dev/null +++ b/docs/integrations/sources/google-webfonts.md @@ -0,0 +1,68 @@ +# Google-webfonts + +This page contains the setup guide and reference information for the [Google-webfonts](https://developers.google.com/fonts/docs/developer_api) source connector. + +## Prerequisites + +Api key is mandate for this connector to work, It could be generated by a gmail account for free at https://console.cloud.google.com/apis/dashboard. +Just pass the generated API key and optional parameters for establishing the connection. Example:123 + +## Setup guide + +### Step 1: Set up Google-webfonts connection + +- Generate an API key (Example: 12345) +- Params (If specific info is needed) +- Available params + - sort: SORT_UNDEFINED, ALPHA, DATE, STYLE, TRENDING, POPULARITY + - alt: json, media or proto + - prettyPrint: boolean + +## Step 2: Set up the Google-webfonts connector in Airbyte + +### For Airbyte Cloud: + +1. [Log into your Airbyte Cloud](https://cloud.airbyte.io/workspaces) account. +2. In the left navigation bar, click **Sources**. In the top-right corner, click **+new source**. +3. On the Set up the source page, enter the name for the Google-webfonts connector and select **Google-webfonts** from the Source type dropdown. +4. 
Enter your `api_key`. +5. Enter the params configuration if needed. Supported params are: sort, alt, prettyPrint (Optional) +6. Click **Set up source**. + +### For Airbyte OSS: + +1. Navigate to the Airbyte Open Source dashboard. +2. Set the name for your source. +3. Enter your `api_key`. +5. Enter the params configuration if needed. Supported params are: sort, alt, prettyPrint (Optional) +6. Click **Set up source**. + +## Supported sync modes + +The Google-webfonts source connector supports the following [sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes): + +| Feature | Supported? | +| :---------------------------- | :--------- | +| Full Refresh Sync | Yes | +| Incremental Sync | No | +| Replicate Incremental Deletes | No | +| SSL connection | Yes | +| Namespaces | No | + +## Supported Streams + +- Webfonts (Single stream API) + +## API method example + +GET https://webfonts.googleapis.com/v1/webfonts?key=<1234567>&sort=SORT_UNDEFINED&prettyPrint=true&alt=json + +## Performance considerations + +Google Webfont's [API reference](https://developers.google.com/fonts/docs/developer_api) has v1 at present and v2 is at development. The connector as default uses v1. + +## Changelog + +| Version | Date | Pull Request | Subject | +| :------ | :--------- | :----------------------------------------------------- | :------------- | +| 0.1.0 | 2022-10-26 | [Init](https://github.com/airbytehq/airbyte/pull/18496)| Initial commit | \ No newline at end of file From d711781e49c16c95acd8751797753838d86c444b Mon Sep 17 00:00:00 2001 From: Davin Chia Date: Fri, 28 Oct 2022 09:28:58 -0700 Subject: [PATCH 10/13] Clean up build.gradle. (#18555) Upstream has fixed this bug so we no longer need to host it internally. Remove this to clean up build.gradle. 
--- build.gradle | 4 ---- 1 file changed, 4 deletions(-) diff --git a/build.gradle b/build.gradle index 0c4c7f42b41d..e0a1bc11ce2f 100644 --- a/build.gradle +++ b/build.gradle @@ -283,10 +283,6 @@ subprojects { subproj -> repositories { mavenCentral() - maven { - // TODO(Issue-4915): Remove this when upstream is merged in. - url 'https://airbyte.mycloudrepo.io/public/repositories/airbyte-public-jars/' - } maven { url 'https://jitpack.io' } From c219dab63c210804e38ad63bb69d398f4e756eff Mon Sep 17 00:00:00 2001 From: Cirdes Date: Fri, 28 Oct 2022 13:46:29 -0300 Subject: [PATCH 11/13] =?UTF-8?q?=F0=9F=8E=89=20New=20Destination:=20Types?= =?UTF-8?q?ense=20(#18349)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Initial boilerplate * 🎉 New Destination: Typesense * remove .java-version * fix doc * add typesense to dest def * add release stage * add requirement to main * auto-bump connector version * add changelog Co-authored-by: marcosmarxm Co-authored-by: Marcos Marx Co-authored-by: Octavia Squidington III --- .../seed/destination_definitions.yaml | 6 + .../resources/seed/destination_specs.yaml | 45 +++++++ .../destination-typesense/.dockerignore | 5 + .../destination-typesense/Dockerfile | 38 ++++++ .../destination-typesense/README.md | 123 ++++++++++++++++++ .../destination-typesense/build.gradle | 8 ++ .../destination_typesense/__init__.py | 8 ++ .../destination_typesense/destination.py | 61 +++++++++ .../destination_typesense/spec.json | 46 +++++++ .../destination_typesense/writer.py | 35 +++++ .../integration_tests/integration_test.py | 95 ++++++++++++++ .../connectors/destination-typesense/main.py | 11 ++ .../destination-typesense/requirements.txt | 1 + .../connectors/destination-typesense/setup.py | 23 ++++ .../unit_tests/unit_test.py | 22 ++++ docs/integrations/README.md | 3 +- docs/integrations/destinations/typesense.md | 39 ++++++ 17 files changed, 568 insertions(+), 1 deletion(-) create mode 100644 
airbyte-integrations/connectors/destination-typesense/.dockerignore create mode 100644 airbyte-integrations/connectors/destination-typesense/Dockerfile create mode 100644 airbyte-integrations/connectors/destination-typesense/README.md create mode 100644 airbyte-integrations/connectors/destination-typesense/build.gradle create mode 100644 airbyte-integrations/connectors/destination-typesense/destination_typesense/__init__.py create mode 100644 airbyte-integrations/connectors/destination-typesense/destination_typesense/destination.py create mode 100644 airbyte-integrations/connectors/destination-typesense/destination_typesense/spec.json create mode 100644 airbyte-integrations/connectors/destination-typesense/destination_typesense/writer.py create mode 100644 airbyte-integrations/connectors/destination-typesense/integration_tests/integration_test.py create mode 100644 airbyte-integrations/connectors/destination-typesense/main.py create mode 100644 airbyte-integrations/connectors/destination-typesense/requirements.txt create mode 100644 airbyte-integrations/connectors/destination-typesense/setup.py create mode 100644 airbyte-integrations/connectors/destination-typesense/unit_tests/unit_test.py create mode 100644 docs/integrations/destinations/typesense.md diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index 7a682eded8ee..e6561b8854ed 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -329,6 +329,12 @@ documentationUrl: https://docs.airbyte.com/integrations/destinations/tidb icon: tidb.svg releaseStage: alpha +- name: Typesense + destinationDefinitionId: 36be8dc6-9851-49af-b776-9d4c30e4ab6a + dockerRepository: airbyte/destination-typesense + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/destinations/typesense 
+ releaseStage: alpha - name: YugabyteDB destinationDefinitionId: 2300fdcf-a532-419f-9f24-a014336e7966 dockerRepository: airbyte/destination-yugabytedb diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index 8c50468aa44c..12f732b856a6 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -6145,6 +6145,51 @@ supported_destination_sync_modes: - "overwrite" - "append" +- dockerImage: "airbyte/destination-typesense:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/destinations/typesense" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Destination Typesense" + type: "object" + required: + - "api_key" + - "host" + additionalProperties: false + properties: + api_key: + title: "API Key" + type: "string" + description: "Typesense API Key" + order: 0 + host: + title: "Host" + type: "string" + description: "Hostname of the Typesense instance without protocol." + order: 1 + port: + title: "Port" + type: "string" + description: "Port of the Typesense instance. Ex: 8108, 80, 443. Default\ + \ is 443" + order: 2 + protocol: + title: "Protocol" + type: "string" + description: "Protocol of the Typesense instance. Ex: http or https. Default\ + \ is https" + order: 3 + batch_size: + title: "Batch size" + type: "string" + description: "How many documents should be imported together. 
Default 1000" + order: 4 + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "overwrite" + - "append" - dockerImage: "airbyte/destination-yugabytedb:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/yugabytedb" diff --git a/airbyte-integrations/connectors/destination-typesense/.dockerignore b/airbyte-integrations/connectors/destination-typesense/.dockerignore new file mode 100644 index 000000000000..d72007ad7e34 --- /dev/null +++ b/airbyte-integrations/connectors/destination-typesense/.dockerignore @@ -0,0 +1,5 @@ +* +!Dockerfile +!main.py +!destination_typesense +!setup.py diff --git a/airbyte-integrations/connectors/destination-typesense/Dockerfile b/airbyte-integrations/connectors/destination-typesense/Dockerfile new file mode 100644 index 000000000000..0f5659af70c9 --- /dev/null +++ b/airbyte-integrations/connectors/destination-typesense/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. 
+RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY destination_typesense ./destination_typesense + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/destination-typesense diff --git a/airbyte-integrations/connectors/destination-typesense/README.md b/airbyte-integrations/connectors/destination-typesense/README.md new file mode 100644 index 000000000000..01c677ffcf05 --- /dev/null +++ b/airbyte-integrations/connectors/destination-typesense/README.md @@ -0,0 +1,123 @@ +# Typesense Destination + +This is the repository for the Typesense destination connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/typesense). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. 
+If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +From the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:destination-typesense:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/typesense) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_typesense/spec.json` file. +Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination typesense test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/destination-typesense:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:destination-typesense:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-typesense:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-typesense:dev check --config /secrets/config.json +# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages +cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-typesense:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all destination connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Coming soon: + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:destination-typesense:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:destination-typesense:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. 
+We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/destination-typesense/build.gradle b/airbyte-integrations/connectors/destination-typesense/build.gradle new file mode 100644 index 000000000000..01ad66a130f7 --- /dev/null +++ b/airbyte-integrations/connectors/destination-typesense/build.gradle @@ -0,0 +1,8 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' +} + +airbytePython { + moduleDirectory 'destination_typesense' +} diff --git a/airbyte-integrations/connectors/destination-typesense/destination_typesense/__init__.py b/airbyte-integrations/connectors/destination-typesense/destination_typesense/__init__.py new file mode 100644 index 000000000000..cc3d48181b6a --- /dev/null +++ b/airbyte-integrations/connectors/destination-typesense/destination_typesense/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .destination import DestinationTypesense + +__all__ = ["DestinationTypesense"] diff --git a/airbyte-integrations/connectors/destination-typesense/destination_typesense/destination.py b/airbyte-integrations/connectors/destination-typesense/destination_typesense/destination.py new file mode 100644 index 000000000000..ac6eb889d319 --- /dev/null +++ b/airbyte-integrations/connectors/destination-typesense/destination_typesense/destination.py @@ -0,0 +1,61 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from logging import Logger +from typing import Any, Iterable, Mapping + +from airbyte_cdk.destinations import Destination +from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, DestinationSyncMode, Status, Type +from destination_typesense.writer import TypesenseWriter +from typesense import Client + + +def get_client(config: Mapping[str, Any]) -> Client: + api_key = config.get("api_key") + host = config.get("host") + port = config.get("port") or "8108" + protocol = config.get("protocol") or "https" + + client = Client({"api_key": api_key, "nodes": [{"host": host, "port": port, "protocol": protocol}], "connection_timeout_seconds": 2}) + + return client + + +class DestinationTypesense(Destination): + def write( + self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] + ) -> Iterable[AirbyteMessage]: + client = get_client(config=config) + + for configured_stream in configured_catalog.streams: + steam_name = configured_stream.stream.name + if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite: + try: + client.collections[steam_name].delete() + except Exception: + pass + client.collections.create({"name": steam_name, "fields": [{"name": ".*", "type": "auto"}]}) + + writer = TypesenseWriter(client, steam_name, config.get("batch_size")) + for message in input_messages: + if message.type == Type.STATE: + 
writer.flush() + yield message + elif message.type == Type.RECORD: + writer.queue_write_operation(message.record.data) + else: + continue + writer.flush() + + def check(self, logger: Logger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: + try: + client = get_client(config=config) + client.collections.create({"name": "_airbyte", "fields": [{"name": "title", "type": "string"}]}) + client.collections["_airbyte"].documents.create({"id": "1", "title": "The Hunger Games"}) + client.collections["_airbyte"].documents["1"].retrieve() + client.collections["_airbyte"].delete() + return AirbyteConnectionStatus(status=Status.SUCCEEDED) + except Exception as e: + return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") diff --git a/airbyte-integrations/connectors/destination-typesense/destination_typesense/spec.json b/airbyte-integrations/connectors/destination-typesense/destination_typesense/spec.json new file mode 100644 index 000000000000..ee2cb5c76fab --- /dev/null +++ b/airbyte-integrations/connectors/destination-typesense/destination_typesense/spec.json @@ -0,0 +1,46 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/typesense", + "supported_destination_sync_modes": ["overwrite", "append"], + "supportsIncremental": true, + "supportsDBT": false, + "supportsNormalization": false, + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Destination Typesense", + "type": "object", + "required": ["api_key", "host"], + "additionalProperties": false, + "properties": { + "api_key": { + "title": "API Key", + "type": "string", + "description": "Typesense API Key", + "order": 0 + }, + "host": { + "title": "Host", + "type": "string", + "description": "Hostname of the Typesense instance without protocol.", + "order": 1 + }, + "port": { + "title": "Port", + "type": "string", + "description": "Port of the Typesense instance. Ex: 8108, 80, 443. 
Default is 443", + "order": 2 + }, + "protocol": { + "title": "Protocol", + "type": "string", + "description": "Protocol of the Typesense instance. Ex: http or https. Default is https", + "order": 3 + }, + "batch_size": { + "title": "Batch size", + "type": "string", + "description": "How many documents should be imported together. Default 1000", + "order": 4 + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-typesense/destination_typesense/writer.py b/airbyte-integrations/connectors/destination-typesense/destination_typesense/writer.py new file mode 100644 index 000000000000..904bfddca826 --- /dev/null +++ b/airbyte-integrations/connectors/destination-typesense/destination_typesense/writer.py @@ -0,0 +1,35 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from collections.abc import Mapping +from logging import getLogger +from uuid import uuid4 + +from typesense import Client + +logger = getLogger("airbyte") + + +class TypesenseWriter: + write_buffer = [] + + def __init__(self, client: Client, steam_name: str, batch_size: int = 1000): + self.client = client + self.steam_name = steam_name + self.batch_size = batch_size + + def queue_write_operation(self, data: Mapping): + random_key = str(uuid4()) + data_with_id = data if "id" in data else {**data, "id": random_key} + self.write_buffer.append(data_with_id) + if len(self.write_buffer) == self.batch_size: + self.flush() + + def flush(self): + buffer_size = len(self.write_buffer) + if buffer_size == 0: + return + logger.info(f"flushing {buffer_size} records") + self.client.collections[self.steam_name].documents.import_(self.write_buffer) + self.write_buffer.clear() diff --git a/airbyte-integrations/connectors/destination-typesense/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-typesense/integration_tests/integration_test.py new file mode 100644 index 000000000000..fd603bb6991e --- /dev/null +++ 
b/airbyte-integrations/connectors/destination-typesense/integration_tests/integration_test.py @@ -0,0 +1,95 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import json +from logging import getLogger +from typing import Any, Dict, Mapping + +import pytest +from airbyte_cdk.models import ( + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStateMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + Status, + SyncMode, + Type, +) +from destination_typesense.destination import DestinationTypesense, get_client +from typesense import Client + + +@pytest.fixture(name="config") +def config_fixture() -> Mapping[str, Any]: + with open("secrets/config.json", "r") as f: + return json.loads(f.read()) + + +@pytest.fixture(name="configured_catalog") +def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: + stream_schema = {"type": "object", "properties": {"col1": {"type": "str"}, "col2": {"type": "integer"}}} + + overwrite_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name="_airbyte", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.overwrite, + ) + + return ConfiguredAirbyteCatalog(streams=[overwrite_stream]) + + +@pytest.fixture(autouse=True) +def teardown(config: Mapping): + yield + client = get_client(config=config) + try: + client.collections["_airbyte"].delete() + except Exception: + pass + + +@pytest.fixture(name="client") +def client_fixture(config) -> Client: + client = get_client(config=config) + client.collections.create({"name": "_airbyte", "fields": [{"name": ".*", "type": "auto"}]}) + return client + + +def test_check_valid_config(config: Mapping): + outcome = DestinationTypesense().check(getLogger("airbyte"), config) + assert outcome.status == Status.SUCCEEDED + + +def test_check_invalid_config(): + outcome = DestinationTypesense().check(getLogger("airbyte"), {"api_key": 
"not_a_real_key", "host": "https://www.fake.com"}) + assert outcome.status == Status.FAILED + + +def _state(data: Dict[str, Any]) -> AirbyteMessage: + return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=data)) + + +def _record(stream: str, str_value: str, int_value: int) -> AirbyteMessage: + return AirbyteMessage( + type=Type.RECORD, record=AirbyteRecordMessage(stream=stream, data={"str_col": str_value, "int_col": int_value}, emitted_at=0) + ) + + +def records_count(client: Client) -> int: + documents_results = client.index("_airbyte").get_documents() + return documents_results.total + + +def test_write(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog, client: Client): + overwrite_stream = configured_catalog.streams[0].stream.name + first_state_message = _state({"state": "1"}) + first_record_chunk = [_record(overwrite_stream, str(i), i) for i in range(2)] + + destination = DestinationTypesense() + list(destination.write(config, configured_catalog, [*first_record_chunk, first_state_message])) + collection = client.collections["_airbyte"].retrieve() + assert collection["num_documents"] == 2 diff --git a/airbyte-integrations/connectors/destination-typesense/main.py b/airbyte-integrations/connectors/destination-typesense/main.py new file mode 100644 index 000000000000..3d97913108e3 --- /dev/null +++ b/airbyte-integrations/connectors/destination-typesense/main.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from destination_typesense import DestinationTypesense + +if __name__ == "__main__": + DestinationTypesense().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-typesense/requirements.txt b/airbyte-integrations/connectors/destination-typesense/requirements.txt new file mode 100644 index 000000000000..d6e1198b1ab1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-typesense/requirements.txt @@ -0,0 +1 @@ +-e . 
diff --git a/airbyte-integrations/connectors/destination-typesense/setup.py b/airbyte-integrations/connectors/destination-typesense/setup.py new file mode 100644 index 000000000000..5c7542d9a742 --- /dev/null +++ b/airbyte-integrations/connectors/destination-typesense/setup.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk", "typesense>=0.14.0"] + +TEST_REQUIREMENTS = ["pytest~=6.1", "typesense>=0.14.0"] + +setup( + name="destination_typesense", + description="Destination implementation for Typesense.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/destination-typesense/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-typesense/unit_tests/unit_test.py new file mode 100644 index 000000000000..991e3cf2d526 --- /dev/null +++ b/airbyte-integrations/connectors/destination-typesense/unit_tests/unit_test.py @@ -0,0 +1,22 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from unittest.mock import patch + +from destination_typesense.writer import TypesenseWriter + + +@patch("typesense.Client") +def test_queue_write_operation(client): + writer = TypesenseWriter(client, "steam_name") + writer.queue_write_operation({"a": "a"}) + assert len(writer.write_buffer) == 1 + + +@patch("typesense.Client") +def test_flush(client): + writer = TypesenseWriter(client, "steam_name") + writer.queue_write_operation({"a": "a"}) + writer.flush() + client.collections.__getitem__.assert_called_once_with("steam_name") diff --git a/docs/integrations/README.md b/docs/integrations/README.md index afaa4b6ecbec..a67b98dc9590 100644 --- a/docs/integrations/README.md +++ b/docs/integrations/README.md @@ -244,4 +244,5 @@ For more information about the grading system, see [Product Release Stages](http | [SQLite](destinations/sqlite.md) | Alpha | No | | [Streamr](destinations/streamr.md) | Alpha | No | | [TiDB](destinations/tidb.md) | Alpha | No | -| [Yugabytedb](destinations/yugabytedb.md) | Alpha | No | \ No newline at end of file +| [Typesense](destinations/typesense.md) | Alpha | No | +| [Yugabytedb](destinations/yugabytedb.md) | Alpha | No | diff --git a/docs/integrations/destinations/typesense.md b/docs/integrations/destinations/typesense.md new file mode 100644 index 000000000000..34b72773250e --- /dev/null +++ b/docs/integrations/destinations/typesense.md @@ -0,0 +1,39 @@ +# Typesense + +## Overview + +The Airbyte Typesense destination allows you to sync data to Typesense. Typesense is a modern, privacy-friendly, open source search engine built from the ground up using cutting-edge search algorithms that take advantage of the latest advances in hardware capabilities. 
+ +### Sync overview + +Using overwrite sync, the [auto schema detection](https://typesense.org/docs/0.23.1/api/collections.html#with-auto-schema-detection) is used and all the fields in a document are automatically indexed for searching and filtering + +With append mode, you have to create the collection first and can use [pre-defined schema](https://typesense.org/docs/0.23.1/api/collections.html#with-pre-defined-schema) that gives you fine-grained control over your document fields. + +#### Output schema + +Each stream will be output into its own collection in Typesense. If an id column is not provided, it will be generated. + +#### Features + +| Feature | Supported?\(Yes/No\) | Notes | +| :---------------------------- | :------------------- | :------------------------------------------------------------------------------------------- | +| Full Refresh Sync | Yes | | +| Incremental - Append Sync | Yes | | +| Incremental - Deduped History | No | As this connector does not support dbt, we don't support this sync mode on this destination. | +| Namespaces | No | | + +## Getting started + +### Requirements + +To use the Typesense destination, you'll need an existing Typesense instance. You can learn about how to create one in the [Typesense docs](https://typesense.org/docs/guide/install-typesense.html). + +### Setup guide + +The setup only requires two fields. First is the `host` which is the address at which Typesense can be reached. The second piece of information is the API key. 
+ +## Changelog + +| Version | Date | Pull Request | Subject | +| 0.1.0 | 2022-10-28 | [18349](https://github.com/airbytehq/airbyte/pull/18349) | New Typesense destination | From 683a3a3344522b5f574157d366fea9755a28d45f Mon Sep 17 00:00:00 2001 From: sbrickel-nimble <113671803+sbrickel-nimble@users.noreply.github.com> Date: Fri, 28 Oct 2022 19:13:14 +0200 Subject: [PATCH 12/13] =?UTF-8?q?=F0=9F=8E=89=20New=20Source:=20Waiteraid?= =?UTF-8?q?=20[low-code=20cdk]=20(#18165)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * added source waiteraid * 🎉 New Source: Waiteraid * add searchBookings stream * add P/R number * add SUMMARY entry * add docs/integrations/README.md entry * add builds.md entry * add docs to each endpoint * fix schema ~ * Update airbyte-integrations/builds.md added web address Co-authored-by: Marcos Marx * Delete catalog.json * Delete TODO.md * Update spec.yaml * add waiteraid to source def seed * auto-bump connector version Co-authored-by: Sebastian Brickel Co-authored-by: Marcos Marx Co-authored-by: marcosmarxm Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 7 + .../src/main/resources/seed/source_specs.yaml | 34 ++++ airbyte-integrations/builds.md | 1 + .../connectors/source-waiteraid/.dockerignore | 6 + .../connectors/source-waiteraid/Dockerfile | 38 +++++ .../connectors/source-waiteraid/README.md | 129 +++++++++++++++ .../connectors/source-waiteraid/__init__.py | 3 + .../acceptance-test-config.yml | 30 ++++ .../acceptance-test-docker.sh | 16 ++ .../connectors/source-waiteraid/bootstrap.md | 11 ++ .../connectors/source-waiteraid/build.gradle | 9 ++ .../integration_tests/__init__.py | 3 + .../integration_tests/abnormal_state.json | 5 + .../integration_tests/acceptance.py | 16 ++ .../integration_tests/configured_catalog.json | 13 ++ .../integration_tests/invalid_config.json | 1 + .../integration_tests/sample_config.json | 3 + .../integration_tests/sample_state.json | 
5 + .../connectors/source-waiteraid/main.py | 13 ++ .../source-waiteraid/requirements.txt | 2 + .../connectors/source-waiteraid/setup.py | 29 ++++ .../source_waiteraid/__init__.py | 8 + .../source_waiteraid/schemas/booking.json | 151 ++++++++++++++++++ .../source_waiteraid/source.py | 18 +++ .../source_waiteraid/spec.yaml | 28 ++++ .../source_waiteraid/waiteraid.yaml | 48 ++++++ docs/integrations/README.md | 1 + docs/integrations/sources/waiteraid.md | 57 +++++++ 28 files changed, 685 insertions(+) create mode 100644 airbyte-integrations/connectors/source-waiteraid/.dockerignore create mode 100644 airbyte-integrations/connectors/source-waiteraid/Dockerfile create mode 100644 airbyte-integrations/connectors/source-waiteraid/README.md create mode 100644 airbyte-integrations/connectors/source-waiteraid/__init__.py create mode 100644 airbyte-integrations/connectors/source-waiteraid/acceptance-test-config.yml create mode 100644 airbyte-integrations/connectors/source-waiteraid/acceptance-test-docker.sh create mode 100644 airbyte-integrations/connectors/source-waiteraid/bootstrap.md create mode 100644 airbyte-integrations/connectors/source-waiteraid/build.gradle create mode 100644 airbyte-integrations/connectors/source-waiteraid/integration_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-waiteraid/integration_tests/abnormal_state.json create mode 100644 airbyte-integrations/connectors/source-waiteraid/integration_tests/acceptance.py create mode 100644 airbyte-integrations/connectors/source-waiteraid/integration_tests/configured_catalog.json create mode 100644 airbyte-integrations/connectors/source-waiteraid/integration_tests/invalid_config.json create mode 100644 airbyte-integrations/connectors/source-waiteraid/integration_tests/sample_config.json create mode 100644 airbyte-integrations/connectors/source-waiteraid/integration_tests/sample_state.json create mode 100644 airbyte-integrations/connectors/source-waiteraid/main.py create mode 100644 
airbyte-integrations/connectors/source-waiteraid/requirements.txt create mode 100644 airbyte-integrations/connectors/source-waiteraid/setup.py create mode 100644 airbyte-integrations/connectors/source-waiteraid/source_waiteraid/__init__.py create mode 100644 airbyte-integrations/connectors/source-waiteraid/source_waiteraid/schemas/booking.json create mode 100644 airbyte-integrations/connectors/source-waiteraid/source_waiteraid/source.py create mode 100644 airbyte-integrations/connectors/source-waiteraid/source_waiteraid/spec.yaml create mode 100644 airbyte-integrations/connectors/source-waiteraid/source_waiteraid/waiteraid.yaml create mode 100644 docs/integrations/sources/waiteraid.md diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 37d226008ecd..33a126256226 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -1413,6 +1413,13 @@ documentationUrl: https://docs.airbyte.com/integrations/sources/elasticsearch sourceType: api releaseStage: alpha +- name: Waiteraid + sourceDefinitionId: 03a53b13-794a-4d6b-8544-3b36ed8f3ce4 + dockerRepository: airbyte/source-waiteraid + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/waiteraid + sourceType: api + releaseStage: alpha - name: Yandex Metrica sourceDefinitionId: 7865dce4-2211-4f6a-88e5-9d0fe161afe7 dockerRepository: airbyte/source-yandex-metrica diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 28e09f19d102..de2d93a704ed 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -13861,6 +13861,40 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- 
dockerImage: "airbyte/source-waiteraid:0.1.0" + spec: + documentationUrl: "https://docsurl.com" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Waiteraid Spec" + type: "object" + required: + - "start_date" + - "auth_hash" + - "restid" + additionalProperties: true + properties: + start_date: + title: "Start Date" + type: "string" + description: "Start getting data from that date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + auth_hash: + title: "Authentication Hash" + type: "string" + description: "Your WaiterAid API key, obtained from API request with Username\ + \ and Password" + airbyte_secret: true + restid: + title: "Restaurant ID" + type: "string" + description: "Your WaiterAid restaurant id from API request to getRestaurants" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-yandex-metrica:0.1.0" spec: documentationUrl: "https://docsurl.com" diff --git a/airbyte-integrations/builds.md b/airbyte-integrations/builds.md index 1c8051245533..ab4c3417064d 100644 --- a/airbyte-integrations/builds.md +++ b/airbyte-integrations/builds.md @@ -121,6 +121,7 @@ | Twilio | [![source-twilio](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-twilio%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-twilio) | | Typeform | [![source-typeform](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-typeform%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-typeform) | | US Census | [![source-us-census](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-us-census%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-us-census) | +| Waiteraid | 
[![source-waiteraid](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-waiteraid%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-waiteraid) | | Whisky Hunter | [![source-whisky-hunter](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-whisky-hunter%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-whisky-hunter) | | Wrike | [![source-wrike](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-wrike%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-wrike) | | YouTube Analytics | [![source-youtube-analytics](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-youtube-analytics%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-youtube-analytics) | diff --git a/airbyte-integrations/connectors/source-waiteraid/.dockerignore b/airbyte-integrations/connectors/source-waiteraid/.dockerignore new file mode 100644 index 000000000000..a89f0645c28d --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_waiteraid +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-waiteraid/Dockerfile b/airbyte-integrations/connectors/source-waiteraid/Dockerfile new file mode 100644 index 000000000000..a0d4d3a3c919 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install 
--prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_waiteraid ./source_waiteraid + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-waiteraid diff --git a/airbyte-integrations/connectors/source-waiteraid/README.md b/airbyte-integrations/connectors/source-waiteraid/README.md new file mode 100644 index 000000000000..a4f376768025 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/README.md @@ -0,0 +1,129 @@ +# Waiteraid Source + +This is the repository for the Waiteraid configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/waiteraid). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. 
+ +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-waiteraid:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/waiteraid) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_waiteraid/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source waiteraid test creds` +and place them into `secrets/config.json`. +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . 
-t airbyte/source-waiteraid:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-waiteraid:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-waiteraid:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-waiteraid:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-waiteraid:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-waiteraid:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. 
+If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-waiteraid:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-waiteraid:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-waiteraid/__init__.py b/airbyte-integrations/connectors/source-waiteraid/__init__.py new file mode 100644 index 000000000000..1100c1c58cf5 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-waiteraid/acceptance-test-config.yml b/airbyte-integrations/connectors/source-waiteraid/acceptance-test-config.yml new file mode 100644 index 000000000000..c5d65168c7ef --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/acceptance-test-config.yml @@ -0,0 +1,30 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-waiteraid:dev +tests: + spec: + - spec_path: "source_waiteraid/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file + # expect_records: + # path: "integration_tests/expected_records.txt" + # extra_fields: no + # exact_order: no + # extra_records: yes + #incremental: # TODO if your connector does not implement incremental sync, remove this block + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog.json" + # future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-waiteraid/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-waiteraid/acceptance-test-docker.sh new file mode 100644 index 000000000000..c51577d10690 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/acceptance-test-docker.sh @@ -0,0 +1,16 @@ 
+#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-waiteraid/bootstrap.md b/airbyte-integrations/connectors/source-waiteraid/bootstrap.md new file mode 100644 index 000000000000..a92233214a44 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/bootstrap.md @@ -0,0 +1,11 @@ +## Streams + +Waiteraid is a REST API. Connector has the following streams, and all of them support full refresh only. + +* [Bookings](https://app.waiteraid.com/api-docs/index.html#api_get_bookings) + +## Authentication +Waiteraid API offers two types of [authentication methods](https://app.waiteraid.com/api-docs/index.html#auth_call). + +* API Keys - Keys are passed using HTTP Basic auth. +* Username and Password - Not supported by this connector. 
diff --git a/airbyte-integrations/connectors/source-waiteraid/build.gradle b/airbyte-integrations/connectors/source-waiteraid/build.gradle new file mode 100644 index 000000000000..6ea94362d508 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_waiteraid' +} diff --git a/airbyte-integrations/connectors/source-waiteraid/integration_tests/__init__.py b/airbyte-integrations/connectors/source-waiteraid/integration_tests/__init__.py new file mode 100644 index 000000000000..1100c1c58cf5 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-waiteraid/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-waiteraid/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..e6ccaa81eced --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "booking": { + "date": "2999-12-31" + } +} diff --git a/airbyte-integrations/connectors/source-waiteraid/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-waiteraid/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-waiteraid/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-waiteraid/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..7b8975e6ceac --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/integration_tests/configured_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "booking", + "json_schema": {}, + "supported_sync_modes": ["full_refresh","incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-waiteraid/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-waiteraid/integration_tests/invalid_config.json new file mode 100644 index 000000000000..1f9795718747 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/integration_tests/invalid_config.json @@ -0,0 +1 @@ +{"start_date": "2022-09-01", "auth_hash": "1nval1dk3y", "restid": "666"} diff --git a/airbyte-integrations/connectors/source-waiteraid/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-waiteraid/integration_tests/sample_config.json new file mode 100644 index 000000000000..ecc4913b84c7 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "fix-me": "TODO" +} diff --git a/airbyte-integrations/connectors/source-waiteraid/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-waiteraid/integration_tests/sample_state.json new file mode 100644 index 000000000000..49e5c722434b --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "booking": { + "date": "2022-10-01" + } +} diff --git a/airbyte-integrations/connectors/source-waiteraid/main.py 
b/airbyte-integrations/connectors/source-waiteraid/main.py new file mode 100644 index 000000000000..c176f331c485 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_waiteraid import SourceWaiteraid + +if __name__ == "__main__": + source = SourceWaiteraid() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-waiteraid/requirements.txt b/airbyte-integrations/connectors/source-waiteraid/requirements.txt new file mode 100644 index 000000000000..0411042aa091 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-waiteraid/setup.py b/airbyte-integrations/connectors/source-waiteraid/setup.py new file mode 100644 index 000000000000..bb67ca6ef48a --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_waiteraid", + description="Source implementation for Waiteraid.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/__init__.py b/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/__init__.py new file mode 100644 index 000000000000..0730ddfa3771 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceWaiteraid + +__all__ = ["SourceWaiteraid"] diff --git a/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/schemas/booking.json b/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/schemas/booking.json new file mode 100644 index 000000000000..ec922ebe34dd --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/schemas/booking.json @@ -0,0 +1,151 @@ +{ + "type": "object", + "properties": { + "id": { + "type": ["null", "number"] + }, + "amount": { + "type": ["null", "number"] + }, + "children_amount": { + "type": ["null", "number"] + }, + "placed": { + "type": ["null", "number"] + }, + "placed_manually": { + "type": ["null", "number"] + }, + "start": { + "type": ["null", "number"] + }, + "end": { + "type": ["null", "number"] + }, + "length": { + "type": ["null", "number"] + }, + "status": { + "type": ["null", "string"] + }, + "arrived": { + "type": ["null", "number"] + }, + "all_seated": { + "type": ["null", "number"] + }, + 
"guest_left": { + "type": ["null", "number"] + }, + "comment": { + "type": ["null", "string"] + }, + "confirmed": { + "type": ["null", "number"] + }, + "waitinbar": { + "type": ["null", "number"] + }, + "internet_booking": { + "type": ["null", "number"] + }, + "internet_booking_confirmed": { + "type": ["null", "number"] + }, + "paid": { + "type": ["null", "number"] + }, + "langid": { + "type": ["null", "number"] + }, + "meal": { + "type": ["null", "string"] + }, + "tables": { + "type": ["null", "number"] + }, + "meal_abbr": { + "type": ["null", "string"] + }, + "table_ids": { + "type": ["null", "number"] + }, + "products": { + "type": ["null", "number"] + }, + "waitinlist": { + "type": ["null", "number"] + }, + "date": { + "type": ["null", "string"] + }, + "time": { + "type": ["null", "string"] + }, + "guest": { + "type": "object", + "properties": { + "id": { + "type": ["null", "number"] + }, + "firstname": { + "type": ["null", "string"] + }, + "lastname": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "address": { + "type": ["null", "string"] + }, + "postalcode": { + "type": ["null", "string"] + }, + "city": { + "type": ["null", "string"] + }, + "company": { + "type": ["null", "string"] + }, + "telephone": { + "type": ["null", "string"] + }, + "mobile": { + "type": ["null", "string"] + }, + "email": { + "type": ["null", "string"] + }, + "comment": { + "type": ["null", "string"] + }, + "other_contact": { + "type": ["null", "string"] + }, + "restaurant_newsletter": { + "type": ["null", "boolean"] + } + } + }, + "booking_date": { + "type": ["null", "number"] + }, + "payStarted": { + "type": ["null", "boolean"] + }, + "payClosed": { + "type": ["null", "boolean"] + }, + "payCharged": { + "type": ["null", "boolean"] + }, + "payActivated": { + "type": ["null", "boolean"] + }, + "has_message": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/source.py 
b/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/source.py new file mode 100644 index 000000000000..7e2b4027a8ed --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceWaiteraid(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "waiteraid.yaml"}) diff --git a/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/spec.yaml b/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/spec.yaml new file mode 100644 index 000000000000..7f00092a24d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/spec.yaml @@ -0,0 +1,28 @@ +documentationUrl: https://docsurl.com +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Waiteraid Spec + type: object + required: + - start_date + - auth_hash + - restid + additionalProperties: true + properties: + start_date: + title: Start Date + type: string + description: Start getting data from that date. 
+ pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ + examples: + - YYYY-MM-DD + auth_hash: + title: Authentication Hash + type: string + description: Your WaiterAid API key, obtained from API request with Username and Password + airbyte_secret: true + restid: + title: Restaurant ID + type: string + description: Your WaiterAid restaurant id from API request to getRestaurants + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/waiteraid.yaml b/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/waiteraid.yaml new file mode 100644 index 000000000000..123c84e0f031 --- /dev/null +++ b/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/waiteraid.yaml @@ -0,0 +1,48 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + requester: + url_base: "https://app.waiteraid.com" + http_method: "POST" + request_options_provider: + request_parameters: + date: "{{ config['start_date'] }}" + auth_hash: "{{ config['auth_hash'] }}" + restid: "{{ config['restid'] }}" + stream_slicer: + type: "DatetimeStreamSlicer" + start_datetime: + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%d" + end_datetime: + datetime: "{{ now_utc() }}" + datetime_format: "%Y-%m-%d %H:%M:%S.%f" + step: "1d" + datetime_format: "%Y-%m-%d" + cursor_field: "{{ options['stream_cursor_field'] }}" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + booking_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "booking" + path: "/wa-api/searchBooking" + stream_cursor_field: "date" + +streams: + - "*ref(definitions.booking_stream)" + +check: + stream_names: + - "booking" diff --git a/docs/integrations/README.md b/docs/integrations/README.md index a67b98dc9590..91cb1d75cb6b 100644 --- a/docs/integrations/README.md +++ 
b/docs/integrations/README.md @@ -181,6 +181,7 @@ For more information about the grading system, see [Product Release Stages](http | [Typeform](sources/typeform.md) | Alpha | Yes | | [US Census](sources/us-census.md) | Alpha | Yes | | [VictorOps](sources/victorops.md) | Alpha | No | +| [Waiteraid](sources/waiteraid.md) | Alpha | Yes | | [Webflow](sources/webflow.md ) | Alpha | Yes | | [Whisky Hunter](sources/whisky-hunter.md ) | Alpha | No | | [WooCommerce](sources/woocommerce.md) | Alpha | No | diff --git a/docs/integrations/sources/waiteraid.md b/docs/integrations/sources/waiteraid.md new file mode 100644 index 000000000000..7b0411ac7fa2 --- /dev/null +++ b/docs/integrations/sources/waiteraid.md @@ -0,0 +1,57 @@ +# Waiteraid + +This page contains the setup guide and reference information for the Waiteraid source connector. + +## Prerequisites + +You can find or create authentication tokens within [Waiteraid](https://app.waiteraid.com/api-docs/index.html#auth_call). + +## Setup guide +## Step 1: Set up the Waiteraid connector in Airbyte + +### For Airbyte Cloud: + +1. [Log into your Airbyte Cloud](https://cloud.airbyte.io/workspaces) account. +2. In the left navigation bar, click **Sources**. In the top-right corner, click **+new source**. +3. On the Set up the source page, enter the name for the Waiteraid connector and select **Waiteraid** from the Source type dropdown. +4. Enter your `auth_token` - Waiteraid Authentication Token. +5. Enter your `restaurant ID` - The Waiteraid ID of the Restaurant you wanto sync. +6. Click **Set up source**. + +### For Airbyte OSS: + +1. Navigate to the Airbyte Open Source dashboard. +2. Set the name for your source. +3. Enter your `auth_token` - Waiteraid Authentication Token. +4. Enter your `restaurant ID` - The Waiteraid ID of the Restaurant you wanto sync. +5. Click **Set up source**. 
+ +## Supported sync modes + +The Waiteraid source connector supports the following [sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes): + +| Feature | Supported? | +| :---------------- | :--------- | +| Full Refresh Sync | Yes | +| Incremental Sync | No | +| SSL connection | No | +| Namespaces | No | + +## Supported Streams + +* [Bookings](https://app.waiteraid.com/api-docs/index.html#api_get_bookings) + +## Data type map + +| Integration Type | Airbyte Type | +| :------------------ | :----------- | +| `string` | `string` | +| `integer`, `number` | `number` | +| `array` | `array` | +| `object` | `object` | + +## Changelog + +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:--------------------------------------------------| +| 0.1.0 | 2022-10-QQ | [QQQQ](https://github.com/airbytehq/airbyte/pull/QQQQ) | New Source: Waiteraid | From 3275832ddc2aaf51cb6d86e837a6e8d36733d560 Mon Sep 17 00:00:00 2001 From: Evan Tahler Date: Fri, 28 Oct 2022 10:13:26 -0700 Subject: [PATCH 13/13] Replace `recipesLink` link with `tutorialsLink` (#18616) --- .../src/packages/cloud/views/layout/SideBar/SideBar.tsx | 2 +- airbyte-webapp/src/utils/links.ts | 2 +- airbyte-webapp/src/views/layout/SideBar/SideBar.tsx | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/airbyte-webapp/src/packages/cloud/views/layout/SideBar/SideBar.tsx b/airbyte-webapp/src/packages/cloud/views/layout/SideBar/SideBar.tsx index be2f40917663..c3818e7f7165 100644 --- a/airbyte-webapp/src/packages/cloud/views/layout/SideBar/SideBar.tsx +++ b/airbyte-webapp/src/packages/cloud/views/layout/SideBar/SideBar.tsx @@ -129,7 +129,7 @@ const SideBar: React.FC = () => { }, { type: SidebarDropdownMenuItemType.LINK, - href: links.recipesLink, + href: links.tutorialLink, icon: , displayName: , }, diff --git a/airbyte-webapp/src/utils/links.ts b/airbyte-webapp/src/utils/links.ts index 
ad99b8cd2f8b..56f3b1794c60 100644 --- a/airbyte-webapp/src/utils/links.ts +++ b/airbyte-webapp/src/utils/links.ts @@ -21,7 +21,7 @@ export const links = { namespaceLink: `${BASE_DOCS_LINK}/understanding-airbyte/namespaces`, tutorialLink: "https://www.youtube.com/watch?v=Rcpt5SVsMpk&feature=emb_logo", statusLink: "https://status.airbyte.io/", - recipesLink: "https://airbyte.com/recipes", + tutorialsLink: "https://airbyte.com/tutorials", syncModeLink: `${BASE_DOCS_LINK}/understanding-airbyte/connections`, demoLink: "https://demo.airbyte.io", contactSales: "https://airbyte.com/talk-to-sales", diff --git a/airbyte-webapp/src/views/layout/SideBar/SideBar.tsx b/airbyte-webapp/src/views/layout/SideBar/SideBar.tsx index 85c80f1fdeca..6c033332eb94 100644 --- a/airbyte-webapp/src/views/layout/SideBar/SideBar.tsx +++ b/airbyte-webapp/src/views/layout/SideBar/SideBar.tsx @@ -115,7 +115,7 @@ const SideBar: React.FC = () => { }, { type: SidebarDropdownMenuItemType.LINK, - href: links.recipesLink, + href: links.tutorialLink, icon: , displayName: , },