From 87d02f545a0509d5c3326ce048b59beb350d3b90 Mon Sep 17 00:00:00 2001 From: Maksym Pavlenok Date: Tue, 28 Sep 2021 19:15:27 +0300 Subject: [PATCH 01/36] fix 404 responses for the ticket_comments stream --- .../79c1aa37-dae3-42ae-b333-d1c105477715.json | 2 +- .../resources/seed/source_definitions.yaml | 2 +- .../source-zendesk-support/Dockerfile | 8 +- .../integration_tests/integration_test.py | 4 +- .../source-zendesk-support/setup.py | 2 +- .../source_zendesk_support/streams.py | 187 ++++++++++++------ 6 files changed, 140 insertions(+), 65 deletions(-) diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json index d54ab6bab2e52..a18afd5a840ff 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "79c1aa37-dae3-42ae-b333-d1c105477715", "name": "Zendesk Support", "dockerRepository": "airbyte/source-zendesk-support", - "dockerImageTag": "0.1.1", + "dockerImageTag": "0.1.2", "documentationUrl": "https://docs.airbyte.io/integrations/sources/zendesk-support", "icon": "zendesk.svg" } diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index cee91b610a7c2..5ca0c12429d40 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -214,7 +214,7 @@ - sourceDefinitionId: 79c1aa37-dae3-42ae-b333-d1c105477715 name: Zendesk Support dockerRepository: airbyte/source-zendesk-support - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-support icon: zendesk.svg sourceType: api diff --git a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile index 867f7a9c27faa..2b42abf4150da 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile +++ b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile @@ -3,7 +3,8 @@ FROM base as builder RUN apk --no-cache upgrade \ - && pip install --upgrade pip + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base WORKDIR /airbyte/integration_code COPY setup.py ./ @@ -12,6 +13,9 @@ RUN pip install --prefix=/install . 
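A plausible reading of the timezone work in this Dockerfile (the tzdata package installed above and the Etc/UTC zoneinfo copied into the runtime stage below): the connector builds its start_time cursor with time.mktime(), which interprets a struct_time in the process's local timezone, so the container has to run in UTC for the produced epochs to line up with Zendesk's UTC unixtime cursors. A minimal sketch of the same effect at process level — the TZ/tzset trick is an illustration, not connector code; the sample values come from the str2unixtime docstring below:

    import os
    import time
    from datetime import datetime

    os.environ["TZ"] = "Etc/UTC"  # what /etc/localtime + /etc/timezone pin image-wide
    time.tzset()  # POSIX-only: re-read TZ for this process
    start_date = datetime(2021, 7, 22, 6, 55, 55)
    # mktime() treats the tuple as local time; under UTC it matches the UTC epoch
    print(int(time.mktime(start_date.timetuple())))  # 1626936955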
FROM base COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone WORKDIR /airbyte/integration_code COPY main.py ./ @@ -21,5 +25,5 @@ COPY source_zendesk_support ./source_zendesk_support ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/source-zendesk-support diff --git a/airbyte-integrations/connectors/source-zendesk-support/integration_tests/integration_test.py b/airbyte-integrations/connectors/source-zendesk-support/integration_tests/integration_test.py index 4d761ca23bcb0..9a20b04cd5f42 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/integration_tests/integration_test.py +++ b/airbyte-integrations/connectors/source-zendesk-support/integration_tests/integration_test.py @@ -27,7 +27,7 @@ import pendulum import requests_mock from source_zendesk_support import SourceZendeskSupport -from source_zendesk_support.streams import Macros, TicketAudits, TicketMetrics, Tickets, Users +from source_zendesk_support.streams import LAST_END_TIME_KEY, Macros, TicketAudits, TicketMetrics, Tickets, Users CONFIG_FILE = "secrets/config.json" @@ -57,7 +57,7 @@ def _test_export_stream(self, stream_cls: type): stream.page_size = 10 for record_id, timestamp in record_timestamps.items(): - state = {"_last_end_time": timestamp} + state = {LAST_END_TIME_KEY: timestamp} for record in stream.read_records(sync_mode=None, stream_state=state): assert record["id"] != record_id break diff --git a/airbyte-integrations/connectors/source-zendesk-support/setup.py b/airbyte-integrations/connectors/source-zendesk-support/setup.py index eccd3a9849833..fe04191eabf77 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/setup.py +++ b/airbyte-integrations/connectors/source-zendesk-support/setup.py @@ -25,7 +25,7 @@ from setuptools import find_packages, setup -MAIN_REQUIREMENTS = ["airbyte-cdk", "pytz"] +MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1.23", "pytz"] TEST_REQUIREMENTS = ["pytest~=6.1", "source-acceptance-test", "requests-mock==1.9.3"] diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py index 1ade4e8eff401..875e12b1ff40b 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py @@ -26,6 +26,7 @@ import calendar import time from abc import ABC, abstractmethod +from collections import defaultdict from datetime import datetime from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Union from urllib.parse import parse_qsl, urlparse @@ -36,6 +37,7 @@ from airbyte_cdk.sources.streams.http import HttpStream DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" +LAST_END_TIME_KEY = "_last_end_time" class SourceZendeskException(Exception): @@ -64,7 +66,7 @@ def url_base(self) -> str: @staticmethod def _parse_next_page_number(response: requests.Response) -> Optional[int]: """Parses a response and tries to find next page number""" - next_page = response.json()["next_page"] + next_page = response.json().get("next_page") if next_page: return dict(parse_qsl(urlparse(next_page).query)).get("page") return None @@ -166,6 +168,17 @@ def 
get_updated_state(self, current_stream_state: MutableMapping[str, Any], late
         new_value = str((latest_record or {}).get(self.cursor_field, ""))
         return {self.cursor_field: max(new_value, old_value)}
 
+    @staticmethod
+    def str2unixtime(str_dt: str) -> Optional[int]:
+        """convert string to unixtime number
+        Input example: '2021-07-22T06:55:55Z' FROMAT : "%Y-%m-%dT%H:%M:%SZ"
+        Output example: 1626936955"
+        """
+        if not str_dt:
+            return None
+        dt = datetime.strptime(str_dt, DATETIME_FORMAT)
+        return calendar.timegm(dt.utctimetuple())
+
 
 class IncrementalExportStream(IncrementalEntityStream, ABC):
     """Use the incremental export API to get items that changed or
@@ -184,25 +197,15 @@ class IncrementalExportStream(IncrementalEntityStream, ABC):
 
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
+        # for saving of last page cursor value
         # endpoints can have different cursor format but incremental logic uses unixtime format only
-        self._last_end_time = None
-
-    @staticmethod
-    def str2unixtime(str_dt: str) -> int:
-        """convert string to unixtime number
-        Input example: '2021-07-22T06:55:55Z' FROMAT : "%Y-%m-%dT%H:%M:%SZ"
-        Output example: 1626936955"
-        """
-        if not str_dt:
-            return None
-        dt = datetime.strptime(str_dt, DATETIME_FORMAT)
-        return calendar.timegm(dt.utctimetuple())
+        self.last_end_time = None
 
     def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
         if self.is_finished:
             return None
-        return {"start_time": self._last_end_time}
+        return {"start_time": self.last_end_time}
 
     def path(self, *args, **kwargs) -> str:
         return f"incremental/{self.name}.json"
@@ -213,13 +216,14 @@ def request_params(
         params = {"per_page": self.page_size}
         if not next_page_token:
-            current_state = stream_state.get("_last_end_time")
+            current_state = stream_state.get(LAST_END_TIME_KEY)
             if not current_state:
                 # try to search all records with generated_timestamp > start_time
                 current_state = stream_state.get(self.cursor_field)
                 if current_state and isinstance(current_state, str) and not current_state.isdigit():
                     current_state = self.str2unixtime(current_state)
-
+            elif not self.last_end_time:
+                self.last_end_time = current_state
             start_time = int(current_state or time.mktime(self._start_date.timetuple()))
             # +1 because the API returns all records where generated_timestamp >= start_time
 
@@ -235,23 +239,34 @@
     def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]:
         # try to save maximum value of a cursor field
+
         state = super().get_updated_state(current_stream_state=current_stream_state, latest_record=latest_record)
 
-        if self._last_end_time:
-            state["_last_end_time"] = self._last_end_time
+        if self.last_end_time:
+            state[LAST_END_TIME_KEY] = self.last_end_time
         current_stream_state.update(state)
         return current_stream_state
 
+    def get_last_end_time(self) -> Optional[Union[str, int]]:
+        if not self.last_end_time:
+            return self.last_end_time
+        return self.datetime2str(datetime.fromtimestamp(self.last_end_time))
+
     def parse_response(
         self, response: requests.Response, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, **kwargs
     ) -> Iterable[Mapping]:
         data = response.json()
+        # save previous end time for filtering of a current response
+        previous_end_time = self.get_last_end_time()
         # save a last end time for the next attempt
-        self._last_end_time = data["end_time"]
+        self.last_end_time = data["end_time"]
         # end_of_stream is true if the current request has returned all the results up to the
current time; false otherwise
         self._finished = data["end_of_stream"]
-        yield from super().parse_response(response, stream_state=stream_state, stream_slice=stream_slice, **kwargs)
+        for record in super().parse_response(response, stream_state=stream_state, stream_slice=stream_slice, **kwargs):
+            if previous_end_time and record.get(self.cursor_field) <= previous_end_time:
+                continue
+            yield record
 
 
 class IncrementalUnsortedStream(IncrementalEntityStream, ABC):
@@ -306,7 +321,12 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str,
 
     def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]:
         params = super().request_params(next_page_token=next_page_token, **kwargs)
-        params["page"] = next_page_token or 1
+        params.update(
+            {
+                "page": next_page_token or 1,
+                "per_page": self.page_size,
+            }
+        )
         return params
 
 
@@ -350,6 +370,10 @@ class TicketComments(IncrementalSortedPageStream):
     Thus we first load all updated tickets and then try to load
     all created/updated comments for every ticket"""
 
+    # Tickets can be removed throughout synchronization. The Zendesk API will return a response
+    # with 404 code if a ticket does not exist. But it shouldn't break loading of other comments.
+    raise_on_http_errors = False
+
     response_list_name = "comments"
     cursor_field = IncrementalSortedPageStream.created_at_field
 
@@ -357,19 +381,12 @@ def __init__(self, **kwargs):
         super().__init__(**kwargs)
         # need to save a slice ticket state
         # because the function get_updated_state doesn't have a stream_slice as argument
-        self._slice_cursor_date = None
+        self._ticket_last_end_time = None
 
     def path(self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, **kwargs) -> str:
         ticket_id = stream_slice["id"]
         return f"tickets/{ticket_id}/comments.json"
 
-    def parse_response(
-        self, response: requests.Response, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, **kwargs
-    ) -> Iterable[Mapping]:
-        # save a slice ticket state
-        self._cursor_ticket_date = stream_slice[Tickets.cursor_field]
-        yield from super().parse_response(response, stream_state=stream_state, stream_slice=stream_slice, **kwargs)
-
     def stream_slices(
         self, sync_mode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None
     ) -> Iterable[Optional[Mapping[str, Any]]]:
@@ -383,33 +400,61 @@ def stream_slices(
         ticket_stream_value = stream_state.get(Tickets.cursor_field)
         if not ticket_stream_value:
             # for backward compatibility because not all relevant states can have some last ticket state
-            ticket_stream_value = Tickets.str2unixtime(stream_state.get(self.cursor_field))
-
-        tickets = Tickets(start_date=self._start_date, subdomain=self._subdomain, authenticator=self.authenticator).read_records(
-            sync_mode=sync_mode, cursor_field=cursor_field, stream_state={Tickets.cursor_field: ticket_stream_value}
-        )
-
-        # selects all tickets what have at least one comment
-        ticket_ids = [
-            {
-                "id": ticket["id"],
-                Tickets.cursor_field: ticket[Tickets.cursor_field],
-            }
-            for ticket in tickets
-            if ticket["comment_count"]
-        ]
-        self.logger.info(f"Found updated {len(ticket_ids)} ticket(s) with comments")
-        # sort slices by generated_timestamp
-        ticket_ids.sort(key=lambda ticket: ticket[Tickets.cursor_field])
-        return ticket_ids
+            ticket_stream_value = self.str2unixtime(stream_state.get(self.cursor_field))
+
+        tickets_stream = Tickets(start_date=self._start_date, subdomain=self._subdomain, authenticator=self.authenticator)
+        ticket_pages =
defaultdict(list)
+        last_end_time = stream_state.get(LAST_END_TIME_KEY, 0)
+        ticket_count = 0
+        for ticket in tickets_stream.read_records(
+            sync_mode=sync_mode,
+            cursor_field=cursor_field,
+            stream_state={Tickets.cursor_field: ticket_stream_value, LAST_END_TIME_KEY: last_end_time},
+        ):
+            if not ticket["comment_count"]:
+                # skip tickets without comments
+                continue
+            ticket_count += 1
+            ticket_pages[tickets_stream.last_end_time].append(
+                {
+                    "id": ticket["id"],
+                    Tickets.cursor_field: ticket[Tickets.cursor_field],
+                }
+            )
+
+        if ticket_pages:
+            last_times = sorted(ticket_pages.keys())
+            # tickets' loading is implemented per page but the stream 'tickets' has
+            # the additional stream state field "_last_end_time" whose value is not compatible
+            # with comments' cursor fields. Thus we need to save it separately and add
+            # last_end_time info for every slice
+            last_page = {last_times[-1]: [ticket_pages[last_times[-1]].pop(-1)]}
+
+            new_last_times = [last_end_time] + last_times[:-1]
+            ticket_pages = {new_last_times[i]: ticket_pages[last_times[i]] for i in range(len(last_times))}
+            ticket_pages.update(last_page)
+
+        self.logger.info(f"Found {ticket_count} ticket(s) with comments")
+        for end_time, tickets in sorted(ticket_pages.items(), key=lambda t: t[0]):
+            self._ticket_last_end_time = end_time
+            yield from sorted(tickets, key=lambda ticket: ticket[Tickets.cursor_field])
 
     def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]:
         """Adds a last cursor ticket updated time for a comment state"""
         new_state = super().get_updated_state(current_stream_state=current_stream_state, latest_record=latest_record)
-        if new_state:
-            new_state[Tickets.cursor_field] = self._cursor_ticket_date
+        if self._ticket_last_end_time:
+
+            new_state[LAST_END_TIME_KEY] = self._ticket_last_end_time
         return new_state
 
+    def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], **kwargs) -> Iterable[Mapping]:
+        """Handle response status"""
+        if response.status_code == 200:
+            yield from super().parse_response(response, stream_state=stream_state, **kwargs)
+        elif response.status_code != 404:
+            # skip 404 errors for not found tickets
+            response.raise_for_status()
+
 
 # NOTE: all Zendesk endpoints can be split into several templates of data loading.
 # 1) with API built-in incremental approach
@@ -419,6 +464,8 @@ def get_updated_state(self, current_stream_state: MutableMapping[str, Any], late
 # 5) without created_at/updated_at fields
 
 # endpoints provide a built-in incremental approach
+
+
 class Users(IncrementalExportStream):
     """Users stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/incremental_exports/"""
 
@@ -435,19 +482,23 @@ class Tickets(IncrementalExportStream):
     # If a system update occurs after an event, the unchanged updated_at time will become earlier relative to the updated generated_timestamp time.
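A small self-contained illustration (hypothetical values, not connector code) of the comment above — after a system update, updated_at can stand still while generated_timestamp keeps advancing, so only the latter is a safe cursor for re-exported tickets:

    # the same ticket exported before and after a system update that
    # does not touch updated_at
    v1 = {"id": 1, "updated_at": "2021-07-22T06:55:55Z", "generated_timestamp": 1626936955}
    v2 = {"id": 1, "updated_at": "2021-07-22T06:55:55Z", "generated_timestamp": 1626940000}
    assert not v2["updated_at"] > v1["updated_at"]  # an updated_at cursor would skip v2
    assert v2["generated_timestamp"] > v1["generated_timestamp"]  # this cursor still advances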
cursor_field = "generated_timestamp" - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """Save state as integer""" - state = super().get_updated_state(current_stream_state, latest_record) - if state: - state[self.cursor_field] = int(state[self.cursor_field]) - return state - def request_params(self, **kwargs) -> MutableMapping[str, Any]: """Adds the field 'comment_count'""" params = super().request_params(**kwargs) params["include"] = "comment_count" return params + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: + """Need to save a cursor values as integer""" + state = super().get_updated_state(current_stream_state=current_stream_state, latest_record=latest_record) + if state and state.get(self.cursor_field): + state[self.cursor_field] = int(state[self.cursor_field]) + return state + + def get_last_end_time(self) -> Optional[Union[str, int]]: + """A response with tickets provides cursor data as unixtime""" + return self.last_end_time + # endpoints provide a pagination mechanism but we can't manage a response order @@ -461,7 +512,27 @@ class GroupMemberships(IncrementalUnsortedPageStream): class SatisfactionRatings(IncrementalUnsortedPageStream): - """SatisfactionRatings stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/satisfaction_ratings/""" + """SatisfactionRatings stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/satisfaction_ratings/ + + The ZenDesk API for this stream provides the filter "start_time" that can be used for incremental logic + """ + + def request_params( + self, stream_state: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, **kwargs + ) -> MutableMapping[str, Any]: + """Adds the filtering field 'start_time'""" + params = super().request_params(stream_state=stream_state, next_page_token=next_page_token, **kwargs) + start_time = self.str2unixtime((stream_state or {}).get(self.cursor_field)) + + if not start_time: + start_time = int(time.mktime(self._start_date.timetuple())) + params.update( + { + "start_time": start_time, + "sort_by": "asc", + } + ) + return params class TicketFields(IncrementalUnsortedPageStream): From a873a6d8f2d627e9a0d0baff127ed3c12277a5e4 Mon Sep 17 00:00:00 2001 From: Maksym Pavlenok Date: Wed, 29 Sep 2021 18:59:23 +0300 Subject: [PATCH 02/36] add unit test --- .../source_zendesk_support/streams.py | 4 +- .../unit_tests/unit_test.py | 118 +++++++++++++----- 2 files changed, 88 insertions(+), 34 deletions(-) diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py index 875e12b1ff40b..4fa2eb348aaa8 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py @@ -248,6 +248,7 @@ def get_updated_state(self, current_stream_state: MutableMapping[str, Any], late return current_stream_state def get_last_end_time(self) -> Optional[Union[str, int]]: + """Updating of last_end_time for comparing with cursor fields""" if not self.last_end_time: return self.last_end_time return self.datetime2str(datetime.fromtimestamp(self.last_end_time)) @@ -256,9 +257,10 @@ def parse_response( self, response: requests.Response, stream_state: Mapping[str, Any], 
stream_slice: Mapping[str, Any] = None, **kwargs
     ) -> Iterable[Mapping]:
-        data = response.json()
         # save previous end time for filtering of a current response
         previous_end_time = self.get_last_end_time()
+
+        data = response.json()
         # save a last end time for the next attempt
         self.last_end_time = data["end_time"]
         # end_of_stream is true if the current request has returned all the results up to the current time; false otherwise
diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py
index f6bf158f7e9d8..227f1c63e0840 100644
--- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py
+++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py
@@ -23,50 +23,102 @@
 #
 
 import json
-from unittest import TestCase
+from unittest.mock import Mock
 
+import pytest
 import requests
 import requests_mock
+from requests.exceptions import HTTPError
 from source_zendesk_support import SourceZendeskSupport
-from source_zendesk_support.streams import Tags
+from source_zendesk_support.streams import Tags, TicketComments
 
 CONFIG_FILE = "secrets/config.json"
 
 
-class TestZendeskSupport(TestCase):
-    """This test class provides a set of tests for different Zendesk streams.
-    The Zendesk API has difference pagination and sorting mechanisms for streams.
-    Let's try to check them
-    """
+@pytest.fixture(scope="module")
+def prepare_stream_args():
+    """Generates streams settings from a file"""
+    with open(CONFIG_FILE, "r") as f:
+        return SourceZendeskSupport.convert_config2stream_args(json.loads(f.read()))
 
-    @staticmethod
-    def prepare_stream_args():
-        """Generates streams settings from a file"""
-        with open(CONFIG_FILE, "r") as f:
-            return SourceZendeskSupport.convert_config2stream_args(json.loads(f.read()))
 
-    def test_backoff_cases(self):
-        """Zendesk sends the header different value for backoff logic"""
+@pytest.mark.parametrize(
+    "header_name,header_value,expected",
+    [
+        # Retry-After > 0
+        ("Retry-After", "123", 123),
+        # Retry-After < 0
+        ("Retry-After", "-123", None),
+        # X-Rate-Limit > 0
+        ("X-Rate-Limit", "100", 1.2),
+        # X-Rate-Limit header < 0
+        ("X-Rate-Limit", "-100", None),
+        # Random header
+        ("Fake-Header", "-100", None),
+    ],
+)
+def test_backoff_cases(prepare_stream_args, header_name, header_value, expected):
+    """Zendesk sends different header values for backoff logic"""
 
-        stream = Tags(**self.prepare_stream_args())
-        default_timeout = None
-        with requests_mock.Mocker() as m:
-            url = stream.url_base + stream.path()
+    stream = Tags(**prepare_stream_args)
+    with requests_mock.Mocker() as m:
+        url = stream.url_base + stream.path()
 
-            # with the Retry-After header > 0
-            m.get(url, headers={"Retry-After": str(123)}, status_code=429)
-            assert stream.backoff_time(requests.get(url)) == 123
-            # with the Retry-After header < 0, must return a default value
-            m.get(url, headers={"Retry-After": str(-123)}, status_code=429)
-            assert stream.backoff_time(requests.get(url)) == default_timeout
+        m.get(url, headers={header_name: header_value}, status_code=429)
+        result = stream.backoff_time(requests.get(url))
+        if expected:
+            assert (result - expected) < 0.005
+        else:
+            assert result is None
 
-            # with the Retry-After header > 0
-            m.get(url, headers={"X-Rate-Limit": str(100)}, status_code=429)
-            assert (stream.backoff_time(requests.get(url)) - 1.2) < 0.0005
-            # with the Retry-After header < 0, must return a default value
-            m.get(url, headers={"X-Rate-Limit":
str(-100)}, status_code=429)
-            assert stream.backoff_time(requests.get(url)) == default_timeout
-            # without rate headers
-            m.get(url)
-            assert stream.backoff_time(requests.get(url)) == default_timeout
 
+def fake_ticket_objects():
+    raise Exception("aa")
+    yield {
+        "id": 12355,
+        "comment_count": 1,
+    }
+
+
+@pytest.mark.parametrize(
+    "status_code,expected_comment_count,expected_exception",
+    [
+        # success
+        (200, 1, None),
+        # not found ticket
+        (404, 0, None),
+        # some other error code.
+        (403, 0, HTTPError),
+    ],
+)
+def test_comments_not_found_ticket(prepare_stream_args, status_code, expected_comment_count, expected_exception):
+    """Checks the case when some ticket is removed during the sync of comments"""
+    fake_id = 12345
+    stream = TicketComments(**prepare_stream_args)
+    with requests_mock.Mocker() as comment_mock:
+        path = f"tickets/{fake_id}/comments.json"
+        stream.path = Mock(return_value=path)
+        url = stream.url_base + path
+        comment_mock.get(
+            url,
+            status_code=status_code,
+            json={
+                "comments": [
+                    {
+                        "id": fake_id,
+                        TicketComments.cursor_field: "2021-07-22T06:55:55Z",
+                    }
+                ]
+            },
+        )
+        comments = stream.read_records(
+            sync_mode=None,
+            stream_slice={
+                "id": fake_id,
+            },
+        )
+        if expected_exception:
+            with pytest.raises(expected_exception):
+                next(comments)
+        else:
+            assert len(list(comments)) == expected_comment_count

From 274a1953a727f8bdc291db2d53c18e6309da391b Mon Sep 17 00:00:00 2001
From: Maksym Pavlenok
Date: Wed, 29 Sep 2021 19:00:48 +0300
Subject: [PATCH 03/36] add unit test

---
 .../source-zendesk-support/unit_tests/unit_test.py | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py
index 227f1c63e0840..adfdf2e754060 100644
--- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py
+++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py
@@ -72,14 +72,6 @@ def test_backoff_cases(prepare_stream_args, header_name, header_value, expected)
             assert result is None
 
 
-def fake_ticket_objects():
-    raise Exception("aa")
-    yield {
-        "id": 12355,
-        "comment_count": 1,
-    }
-
-
 @pytest.mark.parametrize(
     "status_code,expected_comment_count,expected_exception",
     [

From d8d60588e5cfd3b25b0b6b4dbe06eb0be5f916f1 Mon Sep 17 00:00:00 2001
From: Maksym Pavlenok
Date: Fri, 1 Oct 2021 01:04:48 +0300
Subject: [PATCH 04/36] add oauth2 access token

---
 .github/workflows/publish-command.yml | 1 +
 .github/workflows/test-command.yml | 1 +
 .../acceptance-test-config.yml | 3 ++
 .../integration_tests/integration_test.py | 4 +--
 .../source_zendesk_support/source.py | 8 +++--
 .../source_zendesk_support/spec.json | 34 +++++++++++++++++++
 .../source_zendesk_support/streams.py | 8 +++++
 docs/integrations/sources/zendesk-support.md | 5 +--
 tools/bin/ci_credentials.sh | 1 +
 9 files changed, 59 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/publish-command.yml b/.github/workflows/publish-command.yml
index 6f79ae9d1f461..082c8c4a0f00b 100644
--- a/.github/workflows/publish-command.yml
+++ b/.github/workflows/publish-command.yml
@@ -168,6 +168,7 @@ jobs:
           ZENDESK_SUNSHINE_TEST_CREDS: ${{ secrets.ZENDESK_SUNSHINE_TEST_CREDS }}
           ZENDESK_TALK_TEST_CREDS: ${{ secrets.ZENDESK_TALK_TEST_CREDS }}
           ZENDESK_SUPPORT_TEST_CREDS: ${{ secrets.ZENDESK_SUPPORT_TEST_CREDS }}
+          ZENDESK_SUPPORT_OAUTH_TEST_CREDS: ${{ secrets.ZENDESK_SUPPORT_OAUTH_TEST_CREDS }}
           ZOOM_INTEGRATION_TEST_CREDS: ${{
secrets.ZOOM_INTEGRATION_TEST_CREDS }} PLAID_INTEGRATION_TEST_CREDS: ${{ secrets.PLAID_INTEGRATION_TEST_CREDS }} DESTINATION_S3_INTEGRATION_TEST_CREDS: ${{ secrets.DESTINATION_S3_INTEGRATION_TEST_CREDS }} diff --git a/.github/workflows/test-command.yml b/.github/workflows/test-command.yml index 0275fe11d14f7..f4b744de53dd6 100644 --- a/.github/workflows/test-command.yml +++ b/.github/workflows/test-command.yml @@ -163,6 +163,7 @@ jobs: ZENDESK_SUNSHINE_TEST_CREDS: ${{ secrets.ZENDESK_SUNSHINE_TEST_CREDS }} ZENDESK_TALK_TEST_CREDS: ${{ secrets.ZENDESK_TALK_TEST_CREDS }} ZENDESK_SUPPORT_TEST_CREDS: ${{ secrets.ZENDESK_SUPPORT_TEST_CREDS }} + ZENDESK_SUPPORT_OAUTH_TEST_CREDS: ${{ secrets.ZENDESK_SUPPORT_OAUTH_TEST_CREDS }} ZOOM_INTEGRATION_TEST_CREDS: ${{ secrets.ZOOM_INTEGRATION_TEST_CREDS }} PLAID_INTEGRATION_TEST_CREDS: ${{ secrets.PLAID_INTEGRATION_TEST_CREDS }} DESTINATION_S3_INTEGRATION_TEST_CREDS: ${{ secrets.DESTINATION_S3_INTEGRATION_TEST_CREDS }} diff --git a/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml index 61da7ab6b6429..b708da780ae85 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml @@ -7,10 +7,13 @@ tests: connection: - config_path: "secrets/config.json" status: "succeed" + - config_path: "secrets/config_oauth.json" + status: "succeed" - config_path: "integration_tests/invalid_config.json" status: "failed" discovery: - config_path: "secrets/config.json" + - config_path: "secrets/config_oauth.json" basic_read: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-zendesk-support/integration_tests/integration_test.py b/airbyte-integrations/connectors/source-zendesk-support/integration_tests/integration_test.py index c8b26047de791..7e64a76aa3cdb 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/integration_tests/integration_test.py +++ b/airbyte-integrations/connectors/source-zendesk-support/integration_tests/integration_test.py @@ -32,8 +32,8 @@ def _test_export_stream(self, stream_cls: type): # save the first 5 records if len(record_timestamps) > 5: break - if stream._last_end_time not in record_timestamps.values(): - record_timestamps[record["id"]] = stream._last_end_time + if stream.last_end_time not in record_timestamps.values(): + record_timestamps[record["id"]] = stream.last_end_time stream.page_size = 10 for record_id, timestamp in record_timestamps.items(): diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py index f6719d58a21f7..393b118ddddb5 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py @@ -8,7 +8,7 @@ import requests from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator +from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator from .streams import ( GroupMemberships, @@ -29,6 +29,8 @@ UserSettingsStream, ) +# from airbyte_cdk.sources.streams.http.auth.token import TokenAuthenticator 
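For context on the two credential shapes handled in get_authenticator below: Zendesk's API-token flow authenticates with HTTP Basic where the userinfo is "<email>/token:<api_token>" (which is what the email/api_token pair of BasicApiTokenAuthenticator encodes), while the OAuth flow sends the access token as a plain Bearer header. A hedged sketch of the resulting Authorization header values — placeholder credentials, not the connector's classes:

    import base64

    email, api_token = "user@example.com", "secret_api_token"  # placeholders
    basic = "Basic " + base64.b64encode(f"{email}/token:{api_token}".encode()).decode()
    bearer = "Bearer " + "oauth_access_token"  # placeholder OAuth2 access token
    # round-trip check of the Basic userinfo
    assert base64.b64decode(basic.split()[1]).decode() == "user@example.com/token:secret_api_token"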
+
 
 class BasicApiTokenAuthenticator(TokenAuthenticator):
     """basic Authorization header"""
@@ -47,7 +49,9 @@ class SourceZendeskSupport(AbstractSource):
 
     @classmethod
     def get_authenticator(cls, config: Mapping[str, Any]) -> BasicApiTokenAuthenticator:
-        if config["auth_method"].get("email") and config["auth_method"].get("api_token"):
+        if config["auth_method"].get("access_token"):
+            return TokenAuthenticator(token=config["auth_method"]["access_token"])
+        elif config["auth_method"].get("email") and config["auth_method"].get("api_token"):
             return BasicApiTokenAuthenticator(config["auth_method"]["email"], config["auth_method"]["api_token"])
         raise SourceZendeskException(f"Not implemented authorization method: {config['auth_method']}")
 
diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/spec.json b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/spec.json
index 5bd782541f0f5..5dfc15f628c39 100644
--- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/spec.json
+++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/spec.json
@@ -23,6 +23,32 @@
       "default": "api_token",
       "description": "Zendesk service provides 2 auth method: API token and oAuth2. Now only the first one is available. Another one will be added in the future",
       "oneOf": [
+        {
+          "type": "object",
+          "title": "OAuth2.0 authorization",
+          "properties": {
+            "auth_method": {
+              "type": "string",
+              "const": "access_token"
+            },
+            "client_id": {
+              "type": "string",
+              "description": "This is the name of your client for use in code."
+            },
+            "client_secret": {
+              "type": "string",
+              "airbyte_secret": true,
+              "description": "This secret token is used by apps redirecting to your client. "
+            },
+            "access_token": {
+              "type": "string",
+              "airbyte_secret": true,
+              "description": "Used for authorization. The access token doesn't expire."
+            }
+          },
+          "required": ["access_token"],
+          "additionalProperties": false
+        },
         {
           "title": "API Token",
           "type": "object",
@@ -47,5 +73,13 @@
         ]
       }
     }
+  },
+  "authSpecification": {
+    "auth_type": "oauth2.0",
+    "oauth2Specification": {
+      "rootObject": ["auth_method", 0],
+      "oauthFlowInitParameters": [["client_id"], ["client_secret"]],
+      "oauthFlowOutputParameters": [["access_token"]]
+    }
   }
 }
diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py
index 326c2061ef3b1..d99f690cda63f 100644
--- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py
+++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py
@@ -15,6 +15,7 @@
 import requests
 from airbyte_cdk.models import SyncMode
 from airbyte_cdk.sources.streams.http import HttpStream
+from airbyte_cdk.sources.streams.http.auth.core import HttpAuthenticator
 
 DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
 LAST_END_TIME_KEY = "_last_end_time"
@@ -127,6 +128,13 @@ def __init__(self, start_date: str, **kwargs):
         # Flag for marking of completed process
         self._finished = False
 
+    @property
+    def authenticator(self) -> HttpAuthenticator:
+        """This function was redefined because the CDK returns NoAuth for some authenticator classes.
It is a bug and I hope it will be fixed in the future
+        """
+        return self._session.auth or super().authenticator
+
     @property
     def is_finished(self):
         return self._finished
diff --git a/docs/integrations/sources/zendesk-support.md b/docs/integrations/sources/zendesk-support.md
index 510154b59446b..8a6a03f957e1b 100644
--- a/docs/integrations/sources/zendesk-support.md
+++ b/docs/integrations/sources/zendesk-support.md
@@ -79,8 +79,8 @@ The Zendesk connector should not run into Zendesk API limitations under normal u
 * API Token
   * Zendesk API Token
   * Zendesk Email
-* oAuth2 (not implemented)
-
+* oAuth2
+  * Access Token
 
 ### Setup guide
 
@@ -91,6 +91,7 @@ We recommend creating a restricted, read-only key specifically for Airbyte acces
 ### CHANGELOG
 | Version | Date | Pull Request | Subject |
 | :------ | :-------- | :----- | :------ |
+| `0.1.2` | 2021-10-01 | [6513](https://github.com/airbytehq/airbyte/pull/6513) | support oAuth2 access token |
 | `0.1.1` | 2021-09-02 | [5787](https://github.com/airbytehq/airbyte/pull/5787) | fixed incremental logic for the ticket_comments stream |
 | `0.1.0` | 2021-07-21 | [4861](https://github.com/airbytehq/airbyte/pull/4861) | created CDK native zendesk connector |
 
diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh
index add334226dcd9..4474aae6ff371 100755
--- a/tools/bin/ci_credentials.sh
+++ b/tools/bin/ci_credentials.sh
@@ -133,6 +133,7 @@ write_standard_creds source-us-census "$SOURCE_US_CENSUS_TEST_CREDS"
 write_standard_creds source-zendesk-chat "$ZENDESK_CHAT_INTEGRATION_TEST_CREDS"
 write_standard_creds source-zendesk-sunshine "$ZENDESK_SUNSHINE_TEST_CREDS"
 write_standard_creds source-zendesk-support "$ZENDESK_SUPPORT_TEST_CREDS"
+write_standard_creds source-zendesk-support "$ZENDESK_SUPPORT_OAUTH_TEST_CREDS" "config_oauth.json"
 write_standard_creds source-zendesk-talk "$ZENDESK_TALK_TEST_CREDS"
 write_standard_creds source-zoom-singer "$ZOOM_INTEGRATION_TEST_CREDS"
 write_standard_creds source-zuora "$SOURCE_ZUORA_TEST_CREDS"

From 9f98748c109710e29688ed73ab35b5debe6e7389 Mon Sep 17 00:00:00 2001
From: Maksym Pavlenok
Date: Fri, 8 Oct 2021 12:43:11 +0300
Subject: [PATCH 05/36] Update
 airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py

Co-authored-by: George Claireaux

---
 .../source-zendesk-support/source_zendesk_support/streams.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py
index d99f690cda63f..d4cd87ec09828 100644
--- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py
+++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py
@@ -159,7 +159,7 @@ def get_updated_state(self, current_stream_state: MutableMapping[str, Any], late
     @staticmethod
     def str2unixtime(str_dt: str) -> Optional[int]:
         """convert string to unixtime number
-        Input example: '2021-07-22T06:55:55Z' FROMAT : "%Y-%m-%dT%H:%M:%SZ"
+        Input example: '2021-07-22T06:55:55Z' FORMAT : "%Y-%m-%dT%H:%M:%SZ"
         Output example: 1626936955"
         """
         if not str_dt:
             return None
         dt = datetime.strptime(str_dt, DATETIME_FORMAT)
         return calendar.timegm(dt.utctimetuple())

From 7df812c88b6ef8a8c009c00983824384dcd3ce23 Mon Sep 17 00:00:00 2001
From: Maksym Pavlenok
Date: Fri, 8 Oct 2021 14:34:20 +0300
Subject: [PATCH 06/36] switching among auth methods

---
 .../source-zendesk-support/source_zendesk_support/source.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git
a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py index 393b118ddddb5..5c0e8070d7ced 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py @@ -49,9 +49,9 @@ class SourceZendeskSupport(AbstractSource): @classmethod def get_authenticator(cls, config: Mapping[str, Any]) -> BasicApiTokenAuthenticator: - if config["auth_method"].get("access_token"): + if config["auth_method"]["auth_method"] == "access_token": return TokenAuthenticator(token=config["auth_method"]["access_token"]) - elif config["auth_method"].get("email") and config["auth_method"].get("api_token"): + elif config["auth_method"]["auth_method"] == "api_token": return BasicApiTokenAuthenticator(config["auth_method"]["email"], config["auth_method"]["api_token"]) raise SourceZendeskException(f"Not implemented authorization method: {config['auth_method']}") From fa159a55d7b3d8005d71fc07d8043d77fefa8a80 Mon Sep 17 00:00:00 2001 From: Maksym Pavlenok Date: Sat, 16 Oct 2021 01:36:13 +0300 Subject: [PATCH 07/36] update spec file --- .../79c1aa37-dae3-42ae-b333-d1c105477715.json | 2 +- .../resources/seed/source_definitions.yaml | 2 +- .../source-zendesk-support/Dockerfile | 2 +- .../source_zendesk_support/spec.json | 52 ++++++++----------- 4 files changed, 25 insertions(+), 33 deletions(-) diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json index a18afd5a840ff..6a3aecdaa2153 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "79c1aa37-dae3-42ae-b333-d1c105477715", "name": "Zendesk Support", "dockerRepository": "airbyte/source-zendesk-support", - "dockerImageTag": "0.1.2", + "dockerImageTag": "0.1.3", "documentationUrl": "https://docs.airbyte.io/integrations/sources/zendesk-support", "icon": "zendesk.svg" } diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 261361691938e..2480edf736471 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -338,7 +338,7 @@ - sourceDefinitionId: c8630570-086d-4a40-99ae-ea5b18673071 name: Zendesk Talk dockerRepository: airbyte/source-zendesk-talk - dockerImageTag: 0.1.2 + dockerImageTag: 0.1.3 documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-talk sourceType: api - sourceDefinitionId: 29b409d9-30a5-4cc8-ad50-886eb846fea3 diff --git a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile index 2b42abf4150da..f44e3e602d74d 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile +++ b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile @@ -25,5 +25,5 @@ COPY source_zendesk_support ./source_zendesk_support ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT 
["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.2 +LABEL io.airbyte.version=0.1.3 LABEL io.airbyte.name=airbyte/source-zendesk-support diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/spec.json b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/spec.json index c119574e5a7d9..5dfc15f628c39 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/spec.json +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/spec.json @@ -4,19 +4,13 @@ "$schema": "http://json-schema.org/draft-07/schema#", "title": "Source Zendesk Support Spec", "type": "object", - "required": [ - "start_date", - "subdomain", - "auth_method" - ], + "required": ["start_date", "subdomain", "auth_method"], "additionalProperties": false, "properties": { "start_date": { "type": "string", "description": "The date from which you'd like to replicate data for Zendesk Support API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.", - "examples": [ - "2020-10-15T00:00:00Z" - ], + "examples": ["2020-10-15T00:00:00Z"], "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" }, "subdomain": { @@ -31,22 +25,34 @@ "oneOf": [ { "type": "object", - "title": "OAuth2.0 authorization (not implemented)", + "title": "OAuth2.0 authorization", "properties": { "auth_method": { "type": "string", "const": "access_token" + }, + "client_id": { + "type": "string", + "description": "This is the name of your client for use in code." + }, + "client_secret": { + "type": "string", + "airbyte_secret": true, + "description": "This secret token is used by apps redirecting to your client. " + }, + "access_token": { + "type": "string", + "airbyte_secret": true, + "description": "Using for authorization. The access token doesn't expire." 
} }, + "required": ["access_token"], "additionalProperties": false }, { "title": "API Token", "type": "object", - "required": [ - "email", - "api_token" - ], + "required": ["email", "api_token"], "additionalProperties": false, "properties": { "auth_method": { @@ -71,23 +77,9 @@ "authSpecification": { "auth_type": "oauth2.0", "oauth2Specification": { - "rootObject": [ - "auth_method", - 0 - ], - "oauthFlowInitParameters": [ - [ - "client_id" - ], - [ - "client_secret" - ] - ], - "oauthFlowOutputParameters": [ - [ - "access_token" - ] - ] + "rootObject": ["auth_method", 0], + "oauthFlowInitParameters": [["client_id"], ["client_secret"]], + "oauthFlowOutputParameters": [["access_token"]] } } } From 131df60b4033b7fa25f25430e1fd5ec592229faf Mon Sep 17 00:00:00 2001 From: antixar Date: Mon, 1 Nov 2021 19:40:06 +0200 Subject: [PATCH 08/36] update CI secrets logic --- .github/workflows/publish-command.yml | 131 ++------------------------ .github/workflows/test-command.yml | 127 +------------------------ tools/bin/ci_credentials.sh | 120 ++++++++++++++++++++++- tools/lib/gcp-token.sh | 90 ++++++++++++++++++ 4 files changed, 219 insertions(+), 249 deletions(-) create mode 100644 tools/lib/gcp-token.sh diff --git a/.github/workflows/publish-command.yml b/.github/workflows/publish-command.yml index c79a899b755f1..a35439e0a42a6 100644 --- a/.github/workflows/publish-command.yml +++ b/.github/workflows/publish-command.yml @@ -70,131 +70,14 @@ jobs: - name: Install Pyenv run: python3 -m pip install virtualenv==16.7.9 --user - name: Write Integration Test Credentials # TODO DRY this with test-command.yml - run: ./tools/bin/ci_credentials.sh + run: ./tools/bin/ci_credentials.sh test ${{ github.event.inputs.connector }} env: - AMAZON_SELLER_PARTNER_TEST_CREDS: ${{ secrets.AMAZON_SELLER_PARTNER_TEST_CREDS }} - AMAZON_ADS_TEST_CREDS: ${{ secrets.AMAZON_ADS_TEST_CREDS }} - AMPLITUDE_INTEGRATION_TEST_CREDS: ${{ secrets.AMPLITUDE_INTEGRATION_TEST_CREDS }} - AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }} - AWS_REDSHIFT_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_REDSHIFT_INTEGRATION_TEST_CREDS }} - AWS_ORACLE_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_ORACLE_INTEGRATION_TEST_CREDS }} - SOURCE_AWS_CLOUDTRAIL_CREDS: ${{ secrets.SOURCE_AWS_CLOUDTRAIL_CREDS }} - AZURE_STORAGE_INTEGRATION_TEST_CREDS: ${{ secrets.AZURE_STORAGE_INTEGRATION_TEST_CREDS }} - BIGQUERY_INTEGRATION_TEST_CREDS: ${{ secrets.BIGQUERY_INTEGRATION_TEST_CREDS }} - BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS: ${{ secrets.BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS }} - SOURCE_BING_ADS_CREDS: ${{ secrets.SOURCE_BING_ADS_CREDS }} - BIGQUERY_TEST_CREDS: ${{ secrets.BIGQUERY_TEST_CREDS }} - BRAINTREE_TEST_CREDS: ${{ secrets.BRAINTREE_TEST_CREDS }} - CART_TEST_CREDS: ${{ secrets.CART_TEST_CREDS }} - CHARGEBEE_INTEGRATION_TEST_CREDS: ${{ secrets.CHARGEBEE_INTEGRATION_TEST_CREDS }} - DESTINATION_POSTGRES_SSH_KEY_TEST_CREDS: ${{ secrets.DESTINATION_POSTGRES_SSH_KEY_TEST_CREDS }} - DESTINATION_POSTGRES_SSH_PWD_TEST_CREDS: ${{ secrets.DESTINATION_POSTGRES_SSH_PWD_TEST_CREDS }} - DESTINATION_PUBSUB_TEST_CREDS: ${{ secrets.DESTINATION_PUBSUB_TEST_CREDS }} - DESTINATION_KEEN_TEST_CREDS: ${{ secrets.DESTINATION_KEEN_TEST_CREDS }} - DESTINATION_KVDB_TEST_CREDS: ${{ secrets.DESTINATION_KVDB_TEST_CREDS }} - DRIFT_INTEGRATION_TEST_CREDS: ${{ secrets.DRIFT_INTEGRATION_TEST_CREDS }} - SOURCE_DIXA_TEST_CREDS: ${{ secrets.SOURCE_DIXA_TEST_CREDS }} - EXCHANGE_RATES_TEST_CREDS: ${{ secrets.EXCHANGE_RATES_TEST_CREDS }} - 
FACEBOOK_MARKETING_TEST_INTEGRATION_CREDS: ${{ secrets.FACEBOOK_MARKETING_TEST_INTEGRATION_CREDS }} - FACEBOOK_PAGES_INTEGRATION_TEST_CREDS: ${{ secrets.FACEBOOK_PAGES_INTEGRATION_TEST_CREDS }} - FILE_SECURE_HTTPS_TEST_CREDS: ${{ secrets.FILE_SECURE_HTTPS_TEST_CREDS }} - FRESHDESK_TEST_CREDS: ${{ secrets.FRESHDESK_TEST_CREDS }} - GITLAB_INTEGRATION_TEST_CREDS: ${{ secrets.GITLAB_INTEGRATION_TEST_CREDS }} - GH_NATIVE_INTEGRATION_TEST_CREDS: ${{ secrets.GH_NATIVE_INTEGRATION_TEST_CREDS }} - GOOGLE_ADS_TEST_CREDS: ${{ secrets.GOOGLE_ADS_TEST_CREDS }} - GOOGLE_ANALYTICS_V4_TEST_CREDS: ${{ secrets.GOOGLE_ANALYTICS_V4_TEST_CREDS }} - GOOGLE_ANALYTICS_V4_TEST_CREDS_SRV_ACC: ${{ secrets.GOOGLE_ANALYTICS_V4_TEST_CREDS_SRV_ACC }} - GOOGLE_ANALYTICS_V4_TEST_CREDS_OLD: ${{ secrets.GOOGLE_ANALYTICS_V4_TEST_CREDS_OLD }} - GOOGLE_CLOUD_STORAGE_TEST_CREDS: ${{ secrets.GOOGLE_CLOUD_STORAGE_TEST_CREDS }} - GOOGLE_DIRECTORY_TEST_CREDS: ${{ secrets.GOOGLE_DIRECTORY_TEST_CREDS }} - GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS: ${{ secrets.GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS }} - GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS_SRV_ACC: ${{ secrets.GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS_SRV_ACC }} - GOOGLE_SHEETS_TESTS_CREDS: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS }} - GOOGLE_SHEETS_TESTS_CREDS_SRV_ACC: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS_SRV_ACC }} - GOOGLE_SHEETS_TESTS_CREDS_OLD: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS_OLD }} - GOOGLE_WORKSPACE_ADMIN_REPORTS_TEST_CREDS: ${{ secrets.GOOGLE_WORKSPACE_ADMIN_REPORTS_TEST_CREDS }} - GREENHOUSE_TEST_CREDS: ${{ secrets.GREENHOUSE_TEST_CREDS }} - GREENHOUSE_TEST_CREDS_LIMITED: ${{ secrets.GREENHOUSE_TEST_CREDS_LIMITED }} - HARVEST_INTEGRATION_TESTS_CREDS: ${{ secrets.HARVEST_INTEGRATION_TESTS_CREDS }} - HUBSPOT_INTEGRATION_TESTS_CREDS: ${{ secrets.HUBSPOT_INTEGRATION_TESTS_CREDS }} - HUBSPOT_INTEGRATION_TESTS_CREDS_OAUTH: ${{ secrets.HUBSPOT_INTEGRATION_TESTS_CREDS_OAUTH }} - INSTAGRAM_INTEGRATION_TESTS_CREDS: ${{ secrets.INSTAGRAM_INTEGRATION_TESTS_CREDS }} - INTERCOM_INTEGRATION_TEST_CREDS: ${{ secrets.INTERCOM_INTEGRATION_TEST_CREDS }} - ITERABLE_INTEGRATION_TEST_CREDS: ${{ secrets.ITERABLE_INTEGRATION_TEST_CREDS }} - JIRA_INTEGRATION_TEST_CREDS: ${{ secrets.JIRA_INTEGRATION_TEST_CREDS }} - KLAVIYO_TEST_CREDS: ${{ secrets.KLAVIYO_TEST_CREDS }} - LEVER_HIRING_INTEGRATION_TEST_CREDS: ${{ secrets.LEVER_HIRING_INTEGRATION_TEST_CREDS }} - LOOKER_INTEGRATION_TEST_CREDS: ${{ secrets.LOOKER_INTEGRATION_TEST_CREDS }} - MAILCHIMP_TEST_CREDS: ${{ secrets.MAILCHIMP_TEST_CREDS }} - MICROSOFT_TEAMS_TEST_CREDS: ${{ secrets.MICROSOFT_TEAMS_TEST_CREDS }} - MIXPANEL_INTEGRATION_TEST_CREDS: ${{ secrets.MIXPANEL_INTEGRATION_TEST_CREDS }} - MSSQL_RDS_TEST_CREDS: ${{ secrets.MSSQL_RDS_TEST_CREDS }} - PAYPAL_TRANSACTION_CREDS: ${{ secrets.SOURCE_PAYPAL_TRANSACTION_CREDS }} - POSTGRES_SSH_KEY_TEST_CREDS: ${{ secrets.POSTGRES_SSH_KEY_TEST_CREDS }} - POSTGRES_SSH_PWD_TEST_CREDS: ${{ secrets.POSTGRES_SSH_PWD_TEST_CREDS }} - MYSQL_SSH_KEY_TEST_CREDS: ${{ secrets.MYSQL_SSH_KEY_TEST_CREDS }} - MYSQL_SSH_PWD_TEST_CREDS: ${{ secrets.MYSQL_SSH_PWD_TEST_CREDS }} - POSTHOG_TEST_CREDS: ${{ secrets.POSTHOG_TEST_CREDS }} - PIPEDRIVE_INTEGRATION_TESTS_CREDS: ${{ secrets.PIPEDRIVE_INTEGRATION_TESTS_CREDS }} - PIPEDRIVE_INTEGRATION_TESTS_CREDS_OAUTH: ${{ secrets.PIPEDRIVE_INTEGRATION_TESTS_CREDS_OAUTH }} - PIPEDRIVE_INTEGRATION_TESTS_CREDS_OLD: ${{ secrets.PIPEDRIVE_INTEGRATION_TESTS_CREDS_OLD }} - RECHARGE_INTEGRATION_TEST_CREDS: ${{ secrets.RECHARGE_INTEGRATION_TEST_CREDS }} - QUICKBOOKS_TEST_CREDS: ${{ 
secrets.QUICKBOOKS_TEST_CREDS }} - SALESFORCE_BULK_INTEGRATION_TESTS_CREDS: ${{ secrets.SALESFORCE_BULK_INTEGRATION_TESTS_CREDS }} - SALESFORCE_INTEGRATION_TESTS_CREDS: ${{ secrets.SALESFORCE_INTEGRATION_TESTS_CREDS }} - SENDGRID_INTEGRATION_TEST_CREDS: ${{ secrets.SENDGRID_INTEGRATION_TEST_CREDS }} - SHOPIFY_INTEGRATION_TEST_CREDS: ${{ secrets.SHOPIFY_INTEGRATION_TEST_CREDS }} - SHOPIFY_INTEGRATION_TEST_OAUTH_CREDS: ${{ secrets.SHOPIFY_INTEGRATION_TEST_OAUTH_CREDS }} - SOURCE_ASANA_TEST_CREDS: ${{ secrets.SOURCE_ASANA_TEST_CREDS }} - SOURCE_OKTA_TEST_CREDS: ${{ secrets.SOURCE_OKTA_TEST_CREDS }} - SOURCE_SLACK_TEST_CREDS: ${{ secrets.SOURCE_SLACK_TEST_CREDS }} - SOURCE_SLACK_OAUTH_TEST_CREDS: ${{ secrets.SOURCE_SLACK_OAUTH_TEST_CREDS }} - SOURCE_US_CENSUS_TEST_CREDS: ${{ secrets.SOURCE_US_CENSUS_TEST_CREDS }} - SMARTSHEETS_TEST_CREDS: ${{ secrets.SMARTSHEETS_TEST_CREDS }} - SOURCE_SNAPCHAT_MARKETING_CREDS: ${{ secrets.SOURCE_SNAPCHAT_MARKETING_CREDS }} - SNOWFLAKE_INTEGRATION_TEST_CREDS: ${{ secrets.SNOWFLAKE_INTEGRATION_TEST_CREDS }} - SNOWFLAKE_S3_COPY_INTEGRATION_TEST_CREDS: ${{ secrets.SNOWFLAKE_S3_COPY_INTEGRATION_TEST_CREDS }} - SNOWFLAKE_GCS_COPY_INTEGRATION_TEST_CREDS: ${{ secrets.SNOWFLAKE_GCS_COPY_INTEGRATION_TEST_CREDS }} - SOURCE_SQUARE_CREDS: ${{ secrets.SOURCE_SQUARE_CREDS }} - SOURCE_MARKETO_TEST_CREDS: ${{ secrets.SOURCE_MARKETO_TEST_CREDS }} - SOURCE_RECURLY_INTEGRATION_TEST_CREDS: ${{ secrets.SOURCE_RECURLY_INTEGRATION_TEST_CREDS }} - SOURCE_S3_TEST_CREDS: ${{ secrets.SOURCE_S3_TEST_CREDS }} - SOURCE_S3_PARQUET_CREDS: ${{ secrets.SOURCE_S3_PARQUET_CREDS }} - SOURCE_SHORTIO_TEST_CREDS: ${{ secrets.SOURCE_SHORTIO_TEST_CREDS }} - SOURCE_STRIPE_CREDS: ${{ secrets.SOURCE_STRIPE_CREDS }} - STRIPE_INTEGRATION_CONNECTED_ACCOUNT_TEST_CREDS: ${{ secrets.STRIPE_INTEGRATION_CONNECTED_ACCOUNT_TEST_CREDS }} - SURVEYMONKEY_TEST_CREDS: ${{ secrets.SURVEYMONKEY_TEST_CREDS }} - TEMPO_INTEGRATION_TEST_CREDS: ${{ secrets.TEMPO_INTEGRATION_TEST_CREDS }} - TRELLO_TEST_CREDS: ${{ secrets.TRELLO_TEST_CREDS }} - TWILIO_TEST_CREDS: ${{ secrets.TWILIO_TEST_CREDS }} - SOURCE_TYPEFORM_CREDS: ${{ secrets.SOURCE_TYPEFORM_CREDS }} - ZENDESK_CHAT_INTEGRATION_TEST_CREDS: ${{ secrets.ZENDESK_CHAT_INTEGRATION_TEST_CREDS }} - ZENDESK_SUNSHINE_TEST_CREDS: ${{ secrets.ZENDESK_SUNSHINE_TEST_CREDS }} - ZENDESK_TALK_TEST_CREDS: ${{ secrets.ZENDESK_TALK_TEST_CREDS }} - ZENDESK_SUPPORT_TEST_CREDS: ${{ secrets.ZENDESK_SUPPORT_TEST_CREDS }} - ZENDESK_SUPPORT_OAUTH_TEST_CREDS: ${{ secrets.ZENDESK_SUPPORT_OAUTH_TEST_CREDS }} - ZOOM_INTEGRATION_TEST_CREDS: ${{ secrets.ZOOM_INTEGRATION_TEST_CREDS }} - PLAID_INTEGRATION_TEST_CREDS: ${{ secrets.PLAID_INTEGRATION_TEST_CREDS }} - DESTINATION_S3_INTEGRATION_TEST_CREDS: ${{ secrets.DESTINATION_S3_INTEGRATION_TEST_CREDS }} - DESTINATION_AZURE_BLOB_CREDS: ${{ secrets.DESTINATION_AZURE_BLOB_CREDS }} - DESTINATION_GCS_CREDS: ${{ secrets.DESTINATION_GCS_CREDS }} - APIFY_INTEGRATION_TEST_CREDS: ${{ secrets.APIFY_INTEGRATION_TEST_CREDS }} - DESTINATION_DYNAMODB_TEST_CREDS: ${{ secrets.DESTINATION_DYNAMODB_TEST_CREDS }} - SOURCE_ZUORA_TEST_CREDS: ${{ secrets.SOURCE_ZUORA_TEST_CREDS }} - SOURCE_CLOSE_COM_CREDS: ${{ secrets.SOURCE_CLOSE_COM_CREDS }} - SOURCE_BAMBOO_HR_CREDS: ${{ secrets.SOURCE_BAMBOO_HR_CREDS }} - SOURCE_LINKEDIN_ADS_TEST_CREDS: ${{ secrets.SOURCE_LINKEDIN_ADS_TEST_CREDS }} - SOURCE_BIGCOMMERCE_CREDS: ${{ secrets.SOURCE_BIGCOMMERCE_CREDS }} - SOURCE_TIKTOK_MARKETING_TEST_CREDS: ${{ secrets.SOURCE_TIKTOK_MARKETING_TEST_CREDS }} - SOURCE_TIKTOK_MARKETING_PROD_TEST_CREDS: 
${{ secrets.SOURCE_TIKTOK_MARKETING_PROD_TEST_CREDS }} - DESTINATION_DATABRICKS_CREDS: ${{ secrets.DESTINATION_DATABRICKS_CREDS }} - MONGODB_TEST_CREDS: ${{ secrets.MONGODB_TEST_CREDS }} - SOURCE_ONESIGNAL_TEST_CREDS: ${{ secrets.SOURCE_ONESIGNAL_TEST_CREDS }} - SOURCE_SALESLOFT_TEST_CREDS: ${{ secrets.SOURCE_SALESLOFT_TEST_CREDS }} - SOURCE_AMAZON_SQS_TEST_CREDS: ${{ secrets.SOURCE_AMAZON_SQS_TEST_CREDS }} - SOURCE_FRESHSERVICE_TEST_CREDS: ${{ secrets.SOURCE_FRESHSERVICE_TEST_CREDS }} - SOURCE_LEMLIST_TEST_CREDS: ${{ secrets.SOURCE_LEMLIST_TEST_CREDS }} - SOURCE_STRAVA_TEST_CREDS: ${{ secrets.SOURCE_STRAVA_TEST_CREDS }} + SECRETS_JSON: ${{ toJson(secrets) }} + GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }} + run: ./tools/bin/ci_credentials.sh publish ${{ github.event.inputs.connector }} + env: + SECRETS_JSON: ${{ toJson(secrets) }} + GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }} - run: | echo "$SPEC_CACHE_SERVICE_ACCOUNT_KEY" > spec_cache_key_file.json && docker login -u airbytebot -p ${DOCKER_PASSWORD} ./tools/integrations/manage.sh publish airbyte-integrations/${{ github.event.inputs.connector }} ${{ github.event.inputs.run-tests }} --publish_spec_to_cache diff --git a/.github/workflows/test-command.yml b/.github/workflows/test-command.yml index 18808933d48fa..559da9a1cea69 100644 --- a/.github/workflows/test-command.yml +++ b/.github/workflows/test-command.yml @@ -65,131 +65,10 @@ jobs: - name: Install Pyenv run: python3 -m pip install virtualenv==16.7.9 --user - name: Write Integration Test Credentials - run: ./tools/bin/ci_credentials.sh + run: ./tools/bin/ci_credentials.sh test ${{ github.event.inputs.connector }} env: - AMAZON_SELLER_PARTNER_TEST_CREDS: ${{ secrets.AMAZON_SELLER_PARTNER_TEST_CREDS }} - AMAZON_ADS_TEST_CREDS: ${{ secrets.AMAZON_ADS_TEST_CREDS }} - AMPLITUDE_INTEGRATION_TEST_CREDS: ${{ secrets.AMPLITUDE_INTEGRATION_TEST_CREDS }} - AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }} - AWS_ORACLE_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_ORACLE_INTEGRATION_TEST_CREDS }} - SOURCE_AWS_CLOUDTRAIL_CREDS: ${{ secrets.SOURCE_AWS_CLOUDTRAIL_CREDS }} - AWS_REDSHIFT_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_REDSHIFT_INTEGRATION_TEST_CREDS }} - AZURE_STORAGE_INTEGRATION_TEST_CREDS: ${{ secrets.AZURE_STORAGE_INTEGRATION_TEST_CREDS }} - BIGQUERY_INTEGRATION_TEST_CREDS: ${{ secrets.BIGQUERY_INTEGRATION_TEST_CREDS }} - BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS: ${{ secrets.BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS }} - SOURCE_BING_ADS_CREDS: ${{ secrets.SOURCE_BING_ADS_CREDS }} - BIGQUERY_TEST_CREDS: ${{ secrets.BIGQUERY_TEST_CREDS }} - BRAINTREE_TEST_CREDS: ${{ secrets.BRAINTREE_TEST_CREDS }} - CART_TEST_CREDS: ${{ secrets.CART_TEST_CREDS }} - CHARGEBEE_INTEGRATION_TEST_CREDS: ${{ secrets.CHARGEBEE_INTEGRATION_TEST_CREDS }} - DESTINATION_POSTGRES_SSH_KEY_TEST_CREDS: ${{ secrets.DESTINATION_POSTGRES_SSH_KEY_TEST_CREDS }} - DESTINATION_POSTGRES_SSH_PWD_TEST_CREDS: ${{ secrets.DESTINATION_POSTGRES_SSH_PWD_TEST_CREDS }} - DESTINATION_PUBSUB_TEST_CREDS: ${{ secrets.DESTINATION_PUBSUB_TEST_CREDS }} - DESTINATION_KEEN_TEST_CREDS: ${{ secrets.DESTINATION_KEEN_TEST_CREDS }} - DESTINATION_KVDB_TEST_CREDS: ${{ secrets.DESTINATION_KVDB_TEST_CREDS }} - DRIFT_INTEGRATION_TEST_CREDS: ${{ secrets.DRIFT_INTEGRATION_TEST_CREDS }} - SOURCE_DIXA_TEST_CREDS: ${{ secrets.SOURCE_DIXA_TEST_CREDS }} - EXCHANGE_RATES_TEST_CREDS: ${{ secrets.EXCHANGE_RATES_TEST_CREDS }} - FACEBOOK_MARKETING_TEST_INTEGRATION_CREDS: ${{ 
secrets.FACEBOOK_MARKETING_TEST_INTEGRATION_CREDS }} - FACEBOOK_PAGES_INTEGRATION_TEST_CREDS: ${{ secrets.FACEBOOK_PAGES_INTEGRATION_TEST_CREDS }} - FILE_SECURE_HTTPS_TEST_CREDS: ${{ secrets.FILE_SECURE_HTTPS_TEST_CREDS }} - FRESHDESK_TEST_CREDS: ${{ secrets.FRESHDESK_TEST_CREDS }} - GITLAB_INTEGRATION_TEST_CREDS: ${{ secrets.GITLAB_INTEGRATION_TEST_CREDS }} - GH_NATIVE_INTEGRATION_TEST_CREDS: ${{ secrets.GH_NATIVE_INTEGRATION_TEST_CREDS }} - GOOGLE_ADS_TEST_CREDS: ${{ secrets.GOOGLE_ADS_TEST_CREDS }} - GOOGLE_ANALYTICS_V4_TEST_CREDS: ${{ secrets.GOOGLE_ANALYTICS_V4_TEST_CREDS }} - GOOGLE_ANALYTICS_V4_TEST_CREDS_SRV_ACC: ${{ secrets.GOOGLE_ANALYTICS_V4_TEST_CREDS_SRV_ACC }} - GOOGLE_ANALYTICS_V4_TEST_CREDS_OLD: ${{ secrets.GOOGLE_ANALYTICS_V4_TEST_CREDS_OLD }} - GOOGLE_CLOUD_STORAGE_TEST_CREDS: ${{ secrets.GOOGLE_CLOUD_STORAGE_TEST_CREDS }} - GOOGLE_DIRECTORY_TEST_CREDS: ${{ secrets.GOOGLE_DIRECTORY_TEST_CREDS }} - GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS: ${{ secrets.GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS }} - GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS_SRV_ACC: ${{ secrets.GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS_SRV_ACC }} - GOOGLE_SHEETS_TESTS_CREDS: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS }} - GOOGLE_SHEETS_TESTS_CREDS_SRV_ACC: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS_SRV_ACC }} - GOOGLE_SHEETS_TESTS_CREDS_OLD: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS_OLD }} - GOOGLE_WORKSPACE_ADMIN_REPORTS_TEST_CREDS: ${{ secrets.GOOGLE_WORKSPACE_ADMIN_REPORTS_TEST_CREDS }} - GREENHOUSE_TEST_CREDS: ${{ secrets.GREENHOUSE_TEST_CREDS }} - GREENHOUSE_TEST_CREDS_LIMITED: ${{ secrets.GREENHOUSE_TEST_CREDS_LIMITED }} - HARVEST_INTEGRATION_TESTS_CREDS: ${{ secrets.HARVEST_INTEGRATION_TESTS_CREDS }} - HUBSPOT_INTEGRATION_TESTS_CREDS: ${{ secrets.HUBSPOT_INTEGRATION_TESTS_CREDS }} - HUBSPOT_INTEGRATION_TESTS_CREDS_OAUTH: ${{ secrets.HUBSPOT_INTEGRATION_TESTS_CREDS_OAUTH }} - INSTAGRAM_INTEGRATION_TESTS_CREDS: ${{ secrets.INSTAGRAM_INTEGRATION_TESTS_CREDS }} - INTERCOM_INTEGRATION_TEST_CREDS: ${{ secrets.INTERCOM_INTEGRATION_TEST_CREDS }} - ITERABLE_INTEGRATION_TEST_CREDS: ${{ secrets.ITERABLE_INTEGRATION_TEST_CREDS }} - JIRA_INTEGRATION_TEST_CREDS: ${{ secrets.JIRA_INTEGRATION_TEST_CREDS }} - KLAVIYO_TEST_CREDS: ${{ secrets.KLAVIYO_TEST_CREDS }} - SOURCE_ASANA_TEST_CREDS: ${{ secrets.SOURCE_ASANA_TEST_CREDS }} - LEVER_HIRING_INTEGRATION_TEST_CREDS: ${{ secrets.LEVER_HIRING_INTEGRATION_TEST_CREDS }} - LOOKER_INTEGRATION_TEST_CREDS: ${{ secrets.LOOKER_INTEGRATION_TEST_CREDS }} - MAILCHIMP_TEST_CREDS: ${{ secrets.MAILCHIMP_TEST_CREDS }} - MICROSOFT_TEAMS_TEST_CREDS: ${{ secrets.MICROSOFT_TEAMS_TEST_CREDS }} - MIXPANEL_INTEGRATION_TEST_CREDS: ${{ secrets.MIXPANEL_INTEGRATION_TEST_CREDS }} - MSSQL_RDS_TEST_CREDS: ${{ secrets.MSSQL_RDS_TEST_CREDS }} - PAYPAL_TRANSACTION_CREDS: ${{ secrets.SOURCE_PAYPAL_TRANSACTION_CREDS }} - POSTGRES_SSH_KEY_TEST_CREDS: ${{ secrets.POSTGRES_SSH_KEY_TEST_CREDS }} - POSTGRES_SSH_PWD_TEST_CREDS: ${{ secrets.POSTGRES_SSH_PWD_TEST_CREDS }} - MYSQL_SSH_KEY_TEST_CREDS: ${{ secrets.MYSQL_SSH_KEY_TEST_CREDS }} - MYSQL_SSH_PWD_TEST_CREDS: ${{ secrets.MYSQL_SSH_PWD_TEST_CREDS }} - POSTHOG_TEST_CREDS: ${{ secrets.POSTHOG_TEST_CREDS }} - PIPEDRIVE_INTEGRATION_TESTS_CREDS: ${{ secrets.PIPEDRIVE_INTEGRATION_TESTS_CREDS }} - PIPEDRIVE_INTEGRATION_TESTS_CREDS_OAUTH: ${{ secrets.PIPEDRIVE_INTEGRATION_TESTS_CREDS_OAUTH }} - PIPEDRIVE_INTEGRATION_TESTS_CREDS_OLD: ${{ secrets.PIPEDRIVE_INTEGRATION_TESTS_CREDS_OLD }} - RECHARGE_INTEGRATION_TEST_CREDS: ${{ secrets.RECHARGE_INTEGRATION_TEST_CREDS }} - QUICKBOOKS_TEST_CREDS: ${{ 
secrets.QUICKBOOKS_TEST_CREDS }} - SALESFORCE_BULK_INTEGRATION_TESTS_CREDS: ${{ secrets.SALESFORCE_BULK_INTEGRATION_TESTS_CREDS }} - SALESFORCE_INTEGRATION_TESTS_CREDS: ${{ secrets.SALESFORCE_INTEGRATION_TESTS_CREDS }} - SENDGRID_INTEGRATION_TEST_CREDS: ${{ secrets.SENDGRID_INTEGRATION_TEST_CREDS }} - SHOPIFY_INTEGRATION_TEST_CREDS: ${{ secrets.SHOPIFY_INTEGRATION_TEST_CREDS }} - SHOPIFY_INTEGRATION_TEST_OAUTH_CREDS: ${{ secrets.SHOPIFY_INTEGRATION_TEST_OAUTH_CREDS }} - SOURCE_OKTA_TEST_CREDS: ${{ secrets.SOURCE_OKTA_TEST_CREDS }} - SOURCE_SLACK_TEST_CREDS: ${{ secrets.SOURCE_SLACK_TEST_CREDS }} - SOURCE_SLACK_OAUTH_TEST_CREDS: ${{ secrets.SOURCE_SLACK_OAUTH_TEST_CREDS }} - SOURCE_US_CENSUS_TEST_CREDS: ${{ secrets.SOURCE_US_CENSUS_TEST_CREDS }} - SMARTSHEETS_TEST_CREDS: ${{ secrets.SMARTSHEETS_TEST_CREDS }} - SOURCE_SNAPCHAT_MARKETING_CREDS: ${{ secrets.SOURCE_SNAPCHAT_MARKETING_CREDS }} - SNOWFLAKE_INTEGRATION_TEST_CREDS: ${{ secrets.SNOWFLAKE_INTEGRATION_TEST_CREDS }} - SNOWFLAKE_S3_COPY_INTEGRATION_TEST_CREDS: ${{ secrets.SNOWFLAKE_S3_COPY_INTEGRATION_TEST_CREDS }} - SNOWFLAKE_GCS_COPY_INTEGRATION_TEST_CREDS: ${{ secrets.SNOWFLAKE_GCS_COPY_INTEGRATION_TEST_CREDS }} - SOURCE_SQUARE_CREDS: ${{ secrets.SOURCE_SQUARE_CREDS }} - SOURCE_MARKETO_TEST_CREDS: ${{ secrets.SOURCE_MARKETO_TEST_CREDS }} - SOURCE_RECURLY_INTEGRATION_TEST_CREDS: ${{ secrets.SOURCE_RECURLY_INTEGRATION_TEST_CREDS }} - SOURCE_S3_TEST_CREDS: ${{ secrets.SOURCE_S3_TEST_CREDS }} - SOURCE_S3_PARQUET_CREDS: ${{ secrets.SOURCE_S3_PARQUET_CREDS }} - SOURCE_SHORTIO_TEST_CREDS: ${{ secrets.SOURCE_SHORTIO_TEST_CREDS }} - SOURCE_STRIPE_CREDS: ${{ secrets.SOURCE_STRIPE_CREDS }} - STRIPE_INTEGRATION_CONNECTED_ACCOUNT_TEST_CREDS: ${{ secrets.STRIPE_INTEGRATION_CONNECTED_ACCOUNT_TEST_CREDS }} - SURVEYMONKEY_TEST_CREDS: ${{ secrets.SURVEYMONKEY_TEST_CREDS }} - TEMPO_INTEGRATION_TEST_CREDS: ${{ secrets.TEMPO_INTEGRATION_TEST_CREDS }} - TRELLO_TEST_CREDS: ${{ secrets.TRELLO_TEST_CREDS }} - TWILIO_TEST_CREDS: ${{ secrets.TWILIO_TEST_CREDS }} - SOURCE_TYPEFORM_CREDS: ${{ secrets.SOURCE_TYPEFORM_CREDS }} - ZENDESK_CHAT_INTEGRATION_TEST_CREDS: ${{ secrets.ZENDESK_CHAT_INTEGRATION_TEST_CREDS }} - ZENDESK_SUNSHINE_TEST_CREDS: ${{ secrets.ZENDESK_SUNSHINE_TEST_CREDS }} - ZENDESK_TALK_TEST_CREDS: ${{ secrets.ZENDESK_TALK_TEST_CREDS }} - ZENDESK_SUPPORT_TEST_CREDS: ${{ secrets.ZENDESK_SUPPORT_TEST_CREDS }} - ZENDESK_SUPPORT_OAUTH_TEST_CREDS: ${{ secrets.ZENDESK_SUPPORT_OAUTH_TEST_CREDS }} - ZOOM_INTEGRATION_TEST_CREDS: ${{ secrets.ZOOM_INTEGRATION_TEST_CREDS }} - PLAID_INTEGRATION_TEST_CREDS: ${{ secrets.PLAID_INTEGRATION_TEST_CREDS }} - DESTINATION_S3_INTEGRATION_TEST_CREDS: ${{ secrets.DESTINATION_S3_INTEGRATION_TEST_CREDS }} - DESTINATION_AZURE_BLOB_CREDS: ${{ secrets.DESTINATION_AZURE_BLOB_CREDS }} - DESTINATION_GCS_CREDS: ${{ secrets.DESTINATION_GCS_CREDS }} - DESTINATION_DYNAMODB_TEST_CREDS: ${{ secrets.DESTINATION_DYNAMODB_TEST_CREDS }} - APIFY_INTEGRATION_TEST_CREDS: ${{ secrets.APIFY_INTEGRATION_TEST_CREDS }} - SOURCE_ZUORA_TEST_CREDS: ${{ secrets.SOURCE_ZUORA_TEST_CREDS }} - SOURCE_CLOSE_COM_CREDS: ${{ secrets.SOURCE_CLOSE_COM_CREDS }} - SOURCE_BAMBOO_HR_CREDS: ${{ secrets.SOURCE_BAMBOO_HR_CREDS }} - SOURCE_LINKEDIN_ADS_TEST_CREDS: ${{ secrets.SOURCE_LINKEDIN_ADS_TEST_CREDS }} - SOURCE_BIGCOMMERCE_CREDS: ${{ secrets.SOURCE_BIGCOMMERCE_CREDS }} - SOURCE_TIKTOK_MARKETING_TEST_CREDS: ${{ secrets.SOURCE_TIKTOK_MARKETING_TEST_CREDS }} - SOURCE_TIKTOK_MARKETING_PROD_TEST_CREDS: ${{ secrets.SOURCE_TIKTOK_MARKETING_PROD_TEST_CREDS }} - 
DESTINATION_DATABRICKS_CREDS: ${{ secrets.DESTINATION_DATABRICKS_CREDS }} - MONGODB_TEST_CREDS: ${{ secrets.MONGODB_TEST_CREDS }} - SOURCE_ONESIGNAL_TEST_CREDS: ${{ secrets.SOURCE_ONESIGNAL_TEST_CREDS }} - SOURCE_SALESLOFT_TEST_CREDS: ${{ secrets.SOURCE_SALESLOFT_TEST_CREDS }} - SOURCE_AMAZON_SQS_TEST_CREDS: ${{ secrets.SOURCE_AMAZON_SQS_TEST_CREDS }} - SOURCE_FRESHSERVICE_TEST_CREDS: ${{ secrets.SOURCE_FRESHSERVICE_TEST_CREDS }} - SOURCE_LEMLIST_TEST_CREDS: ${{ secrets.SOURCE_LEMLIST_TEST_CREDS }} - SOURCE_STRAVA_TEST_CREDS: ${{ secrets.SOURCE_STRAVA_TEST_CREDS }} + SECRETS_JSON: ${{ toJson(secrets) }} + GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }} - run: | ./tools/bin/ci_integration_test.sh ${{ github.event.inputs.connector }} name: test ${{ github.event.inputs.connector }} diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh index 69147692d89fe..896ebcb4a3c47 100755 --- a/tools/bin/ci_credentials.sh +++ b/tools/bin/ci_credentials.sh @@ -1,16 +1,46 @@ #!/usr/bin/env bash . tools/lib/lib.sh +. tools/lib/gcp-token.sh set -e +COMMAND_NAME=$1 + +# all secrets will be loaded if the second argument is not present +CONNECTOR_FULLNAME=${2:-all} +CONNECTOR_NAME=`echo ${CONNECTOR_FULLNAME} | rev | cut -d'/' -f1 | rev` + +GSM_SCOPES="https://www.googleapis.com/auth/cloud-platform" + +declare -A SECRET_MAP + + function write_standard_creds() { local connector_name=$1 local creds=$2 local cred_filename=${3:-config.json} + if [[ $CONNECTOR_NAME != "all" && ${connector_name} != ${CONNECTOR_NAME} ]]; then + return 0 + fi + local key="${connector_name}#${cred_filename}" + [[ -z "${creds}" ]] && error "Don't find data for the connector '${key})" + + if [ -v SECRET_MAP[${key}] ]; then + echo "The connector '${key}' was added before" + return 0 + fi + SECRET_MAP[${key}]="${creds}" + return 0 +} + +function _write_standard_creds() { + local connector_name=$1 + local creds=$2 + local cred_filename=$3 [ -z "$connector_name" ] && error "Empty connector name" - [ -z "$creds" ] && error "Creds not set for $connector_name" + [ -z "$creds" ] && error "!!!!!Creds not set for $connector_name" if [ "$connector_name" = "base-normalization" ]; then local secrets_dir="airbyte-integrations/bases/${connector_name}/secrets" @@ -21,6 +51,89 @@ function write_standard_creds() { echo "$creds" > "${secrets_dir}/${cred_filename}" } +function save_all() { + for key in "${!SECRET_MAP[@]}"; do + local connector_name=$(echo ${key} | cut -d'#' -f1) + local cred_filename=$(echo ${key} | cut -d'#' -f2) + local creds=${SECRET_MAP[${key}]} + _write_standard_creds ${connector_name} "${creds}" ${cred_filename} + done + return 0 +} + + +function export_github_secrets(){ + local pairs=`echo ${SECRETS_JSON} | jq -c 'keys[] as $k | "\($k)=\(.[$k])"'` + while read pair; do + local key=`echo ${pair} | cut -d'=' -f1` + key=${key#?} + local value=`echo ${pair} | cut -d'=' -f2-` + value=${value%?} + if [[ "$key" == *"_CREDS"* ]]; then + declare -gxr "${key}"="${value}" + else + echo "skip the env key: ${key}" + fi + done <<< ${pairs} + unset SECRETS_JSON +} + +function export_gsm_secrets(){ + local config_file=`mktemp` + echo "${GCP_GSM_CREDENTIALS}" > ${config_file} + local access_token=$(get_gcp_access_token "${config_file}" "${GSM_SCOPES}") + local project_id=$(parse_project_id "${config_file}") + rm ${config_file} + + # docs: https://cloud.google.com/secret-manager/docs/filtering#api + local filter="name:SECRET_" + [[ ${CONNECTOR_NAME} != "all" ]] && filter="${filter} AND labels.connector=${CONNECTOR_NAME}" + local 
uri="https://secretmanager.googleapis.com/v1/projects/${project_id}/secrets" + local next_token='' + while true; do + local data=$(curl -s --get --fail "${uri}" \ + --data-urlencode "filter=${filter}" \ + --data-urlencode "pageToken=${next_token}" \ + --header "authorization: Bearer ${access_token}" \ + --header "content-type: application/json" \ + --header "x-goog-user-project: ${project_id}") + [[ -z ${data} ]] && error "Can't load secrets' list" + for row in $(echo "${data}" | jq -r '.secrets[] | @base64'); do + local secret_info=$(echo ${row} | base64 --decode) + local secret_name=$(echo ${secret_info}| jq -r .name) + local label_filename=$(echo ${secret_info}| jq -r '.labels.filename // "config"') + local label_connector=$(echo ${secret_info}| jq -r '.labels.connector // ""') + local label_command=$(echo ${secret_info}| jq -r ".labels.command // \"${COMMAND_NAME}\"") + + # skip secrets without the label "connector" + [[ -z ${label_connector} ]] && continue + # skip secrets for other comments + # all secrets without the "command" label will be added too + [[ ${label_command} != ${COMMAND_NAME} ]] && continue + # all secret file names should be finished with ".json" + # but '.' cant be used + local filename="${label_filename}.json" + echo "found the Google secret: ${secret_name} => ${filename} for the command '${label_command}'" + local secret_uri="https://secretmanager.googleapis.com/v1/${secret_name}/versions/latest:access" + local secret_data=$(curl -s --get --fail "${secret_uri}" \ + --header "authorization: Bearer ${access_token}" \ + --header "content-type: application/json" \ + --header "x-goog-user-project: ${project_id}") + [[ -z ${secret_data} ]] && error "Can't load secrets' list" + + secret_data=$(echo ${secret_data} | jq -r '.payload.data // ""' | base64 -d) + write_standard_creds "${label_connector}" "${secret_data}" "${filename}" + done + next_token=`echo ${data} | jq -r '.nextPageToken // ""'` + [[ -z ${next_token} ]] && break + done + return 0 +} + +export_gsm_secrets +export_github_secrets + + # Please maintain this organisation and alphabetise. write_standard_creds destination-bigquery "$BIGQUERY_INTEGRATION_TEST_CREDS" "credentials.json" write_standard_creds destination-bigquery-denormalized "$BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS" "credentials.json" @@ -70,6 +183,7 @@ write_standard_creds source-exchange-rates "$EXCHANGE_RATES_TEST_CREDS" write_standard_creds source-file "$GOOGLE_CLOUD_STORAGE_TEST_CREDS" "gcs.json" write_standard_creds source-file "$AWS_S3_INTEGRATION_TEST_CREDS" "aws.json" write_standard_creds source-file "$AZURE_STORAGE_INTEGRATION_TEST_CREDS" "azblob.json" +write_standard_creds source-file "$FILE_SECURE_HTTPS_TEST_CREDS" write_standard_creds source-file-secure "$FILE_SECURE_HTTPS_TEST_CREDS" write_standard_creds source-freshdesk "$FRESHDESK_TEST_CREDS" write_standard_creds source-freshservice "$SOURCE_FRESHSERVICE_TEST_CREDS" @@ -156,3 +270,7 @@ write_standard_creds source-zendesk-support "$ZENDESK_SUPPORT_OAUTH_TEST_CREDS" write_standard_creds source-zendesk-talk "$ZENDESK_TALK_TEST_CREDS" write_standard_creds source-zoom-singer "$ZOOM_INTEGRATION_TEST_CREDS" write_standard_creds source-zuora "$SOURCE_ZUORA_TEST_CREDS" + +save_all +exit $? 
+ diff --git a/tools/lib/gcp-token.sh b/tools/lib/gcp-token.sh new file mode 100644 index 0000000000000..4f4042686828d --- /dev/null +++ b/tools/lib/gcp-token.sh @@ -0,0 +1,90 @@ +#!/usr/bin/env bash +# Test script to access/generate secrets in Secret Manager + +# PROJECT="engineering-devops" +# SCOPE="https://www.googleapis.com/auth/cloud-platform" +# SERVICE_ACCOUNT_FILE=secret-manager.json +# SECRET=my-secret +TOKEN_TTL=3600 + + +_var2base64() { + printf "$1" | _urlencode_base64 +} + +_urlencode_base64() { + base64 | tr '/+' '_-' | tr -d '=\n' +} + +function _parse_token_uri(){ + local config_file=$1 + local token_uri=$(jq -r .token_uri ${config_file}) + echo "${token_uri}" +} + +function _generate_jwt() { + # Generate a JWT token from a service account json file and scopes + local config_file=$1 + local scopes=$2 + + local now="$(date +%s)" + local expiration_time=$((${now} + ${TOKEN_TTL})) + # parse the file with credentials + local private_key=$(jq -r .private_key ${config_file}) + local client_email=$(jq -r .client_email ${config_file}) + local token_uri=$(_parse_token_uri "${config_file}") + + local claim=$(echo "{ + \"iat\": ${now}, + \"iss\": \"${client_email}\", + \"scope\": \"$scopes\", + \"aud\": \"${token_uri}\", + \"exp\":${expiration_time} + }" | jq -c) + local headers='{"typ":"JWT","alg":"RS256"}' + local body="$(_var2base64 "$headers").$(_var2base64 "$claim")" + local signature=$(openssl dgst -sha256 -sign <(echo "$private_key") <(printf "$body") | _urlencode_base64) + echo "$body.$signature" +} + +function parse_project_id(){ + # find the project_id in the config file + local config_file=$1 + local project_id=$(jq -r .project_id ${config_file}) + echo "${project_id}" +} + +function get_gcp_access_token() { + # Generate an access token from a service account json file and scopes + local config_file="$1" + local scopes="$2" + local jwt=`_generate_jwt "${config_file}" "$scopes"` + local token_uri=$(_parse_token_uri "${config_file}") + local data=$(curl -s -X POST ${token_uri} \ + --data-urlencode "assertion=${jwt}" \ + --data-urlencode 'grant_type=urn:ietf:params:oauth:grant-type:jwt-bearer' + ) + echo $data | jq -r .access_token +} + +# token=$(get_access_token $SERVICE_ACCOUNT_FILE $SCOPE) + +# # Test fetching a secret.
Use the latest version + +# curl "https://secretmanager.googleapis.com/v1/projects/$PROJECT/secrets/$SECRET/versions/latest:access" \ +# --request "GET" \ +# --header "authorization: Bearer $token" \ +# --header "content-type: application/json" \ +# --header "x-goog-user-project: $PROJECT" + +# # Test writing a new secret + +# SECRET_DATA=$(echo "seCr3t" | base64) + + +# curl "https://secretmanager.googleapis.com/v1/projects/$PROJECT/secrets/$SECRET:addVersion" \ +# --request "POST" \ +# --header "authorization: Bearer $token" \ +# --header "content-type: application/json" \ +# --header "x-goog-user-project: $PROJECT" \ +# --data "{\"payload\": {\"data\": \"${SECRET_DATA}\"}}" \ No newline at end of file From ddd0922c1f7fb39b8208391d432db8ee2cf30a77 Mon Sep 17 00:00:00 2001 From: antixar Date: Mon, 1 Nov 2021 19:42:50 +0200 Subject: [PATCH 09/36] update CI secrets logic --- .github/workflows/publish-command.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/workflows/publish-command.yml b/.github/workflows/publish-command.yml index a35439e0a42a6..27fcf07841531 100644 --- a/.github/workflows/publish-command.yml +++ b/.github/workflows/publish-command.yml @@ -70,10 +70,6 @@ jobs: - name: Install Pyenv run: python3 -m pip install virtualenv==16.7.9 --user - name: Write Integration Test Credentials # TODO DRY this with test-command.yml - run: ./tools/bin/ci_credentials.sh test ${{ github.event.inputs.connector }} - env: - SECRETS_JSON: ${{ toJson(secrets) }} - GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }} run: ./tools/bin/ci_credentials.sh publish ${{ github.event.inputs.connector }} env: SECRETS_JSON: ${{ toJson(secrets) }} From 9012db9b8f7936c7a3451509054080922faa6172 Mon Sep 17 00:00:00 2001 From: antixar Date: Mon, 1 Nov 2021 19:45:10 +0200 Subject: [PATCH 10/36] remove debug data --- tools/lib/gcp-token.sh | 21 --------------------- 1 file changed, 21 deletions(-) diff --git a/tools/lib/gcp-token.sh b/tools/lib/gcp-token.sh index 4f4042686828d..00ac396ba50b5 100644 --- a/tools/lib/gcp-token.sh +++ b/tools/lib/gcp-token.sh @@ -67,24 +67,3 @@ function get_gcp_access_token() { echo $data | jq -r .access_token } -# token=$(get_access_token $SERVICE_ACCOUNT_FILE $SCOPE) - -# # Test fetching a secret. 
Use the latest version - -# curl "https://secretmanager.googleapis.com/v1/projects/$PROJECT/secrets/$SECRET/versions/latest:access" \ -# --request "GET" \ -# --header "authorization: Bearer $token" \ -# --header "content-type: application/json" \ -# --header "x-goog-user-project: $PROJECT" - -# # Test writing a new secret - -# SECRET_DATA=$(echo "seCr3t" | base64) - - -# curl "https://secretmanager.googleapis.com/v1/projects/$PROJECT/secrets/$SECRET:addVersion" \ -# --request "POST" \ -# --header "authorization: Bearer $token" \ -# --header "content-type: application/json" \ -# --header "x-goog-user-project: $PROJECT" \ -# --data "{\"payload\": {\"data\": \"${SECRET_DATA}\"}}" \ No newline at end of file From 3f80d96b3ad77430c81f3f53deaf32dcfc237a2f Mon Sep 17 00:00:00 2001 From: antixar Date: Tue, 2 Nov 2021 00:04:18 +0200 Subject: [PATCH 11/36] add a debug message --- tools/bin/ci_credentials.sh | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh index 896ebcb4a3c47..4791b636f9beb 100755 --- a/tools/bin/ci_credentials.sh +++ b/tools/bin/ci_credentials.sh @@ -48,6 +48,7 @@ function _write_standard_creds() { local secrets_dir="airbyte-integrations/connectors/${connector_name}/secrets" fi mkdir -p "$secrets_dir" + echo "Saved a secret => ${secrets_dir}/${cred_filename}" echo "$creds" > "${secrets_dir}/${cred_filename}" } @@ -57,6 +58,7 @@ function save_all() { local cred_filename=$(echo ${key} | cut -d'#' -f2) local creds=${SECRET_MAP[${key}]} _write_standard_creds ${connector_name} "${creds}" ${cred_filename} + done return 0 } @@ -71,8 +73,6 @@ function export_github_secrets(){ value=${value%?} if [[ "$key" == *"_CREDS"* ]]; then declare -gxr "${key}"="${value}" - else - echo "skip the env key: ${key}" fi done <<< ${pairs} unset SECRETS_JSON @@ -98,6 +98,8 @@ function export_gsm_secrets(){ --header "content-type: application/json" \ --header "x-goog-user-project: ${project_id}") [[ -z ${data} ]] && error "Can't load secrets' list" + [[ ${data} == "{}" ]] && data='{"secrets": []}' + for row in $(echo "${data}" | jq -r '.secrets[] | @base64'); do local secret_info=$(echo ${row} | base64 --decode) local secret_name=$(echo ${secret_info}| jq -r .name) From b4935dee58c8b8a82fc6e41e9b41aa925fa8bace Mon Sep 17 00:00:00 2001 From: antixar Date: Tue, 2 Nov 2021 14:33:14 +0200 Subject: [PATCH 12/36] fix json convertation --- tools/bin/ci_credentials.sh | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh index 4791b636f9beb..fa73d6cf6b1e3 100755 --- a/tools/bin/ci_credentials.sh +++ b/tools/bin/ci_credentials.sh @@ -65,14 +65,13 @@ function save_all() { function export_github_secrets(){ - local pairs=`echo ${SECRETS_JSON} | jq -c 'keys[] as $k | "\($k)=\(.[$k])"'` - while read pair; do - local key=`echo ${pair} | cut -d'=' -f1` - key=${key#?} - local value=`echo ${pair} | cut -d'=' -f2-` - value=${value%?} + local pairs=`echo ${SECRETS_JSON} | jq -c 'keys[] as $k | {"name": $k, "value": .[$k]} | @base64'` + while read row; do + pair=$(echo "${row}" | tr -d '"' | base64 -d) + local key=$(echo ${pair} | jq -r .name) + local value=$(echo ${pair} | jq -r .value) if [[ "$key" == *"_CREDS"* ]]; then - declare -gxr "${key}"="${value}" + declare -gxr "${key}"="$(echo ${value})" fi done <<< ${pairs} unset SECRETS_JSON From d59796578bad0ae5f445aebf0062baf9a8a528e0 Mon Sep 17 00:00:00 2001 From: antixar Date: Tue, 2 Nov 2021 15:02:02 +0200 Subject: 
[PATCH 13/36] fix json convertation --- tools/bin/ci_credentials.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh index fa73d6cf6b1e3..9e995d246a3e4 100755 --- a/tools/bin/ci_credentials.sh +++ b/tools/bin/ci_credentials.sh @@ -20,6 +20,7 @@ function write_standard_creds() { local connector_name=$1 local creds=$2 local cred_filename=${3:-config.json} + local source_name=${4:-github} if [[ $CONNECTOR_NAME != "all" && ${connector_name} != ${CONNECTOR_NAME} ]]; then return 0 fi @@ -30,6 +31,7 @@ echo "The connector '${key}' was added before" return 0 fi + echo "register the secret ${key} from ${source_name}" SECRET_MAP[${key}]="${creds}" return 0 } @@ -123,7 +125,7 @@ function export_gsm_secrets(){ [[ -z ${secret_data} ]] && error "Can't load secrets' list" secret_data=$(echo ${secret_data} | jq -r '.payload.data // ""' | base64 -d) - write_standard_creds "${label_connector}" "${secret_data}" "${filename}" + write_standard_creds "${label_connector}" "${secret_data}" "${filename}" "gsm" done next_token=`echo ${data} | jq -r '.nextPageToken // ""'` [[ -z ${next_token} ]] && break From f50804af0c651433a7609851516eb933868e81bf Mon Sep 17 00:00:00 2001 From: antixar Date: Thu, 4 Nov 2021 17:42:53 +0200 Subject: [PATCH 14/36] support one secret by several connectors --- tools/bin/ci_credentials.sh | 30 +++++++++++++++++----------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh index 9e995d246a3e4..56db63223962d 100755 --- a/tools/bin/ci_credentials.sh +++ b/tools/bin/ci_credentials.sh @@ -21,6 +21,10 @@ function write_standard_creds() { local creds=$2 local cred_filename=${3:-config.json} local source_name=${4:-github} + + [ -z "$connector_name" ] && error "Empty connector name" + [ -z "$creds" ] && error "!!!!!Creds not set for $connector_name" + if [[ $CONNECTOR_NAME != "all" && ${connector_name} != ${CONNECTOR_NAME} ]]; then return 0 fi @@ -38,11 +42,11 @@ function _write_standard_creds() { local connector_name=$1 - local creds=$2 - local cred_filename=$3 - - [ -z "$connector_name" ] && error "Empty connector name" - [ -z "$creds" ] && error "!!!!!Creds not set for $connector_name" + local cred_filename=$2 + local creds=$3 + if ! jq -e .
>/dev/null 2>&1 <<<${creds}; then + error "Failed to parse JSON for '${connector_name}' => ${cred_filename}" + fi if [ "$connector_name" = "base-normalization" ]; then local secrets_dir="airbyte-integrations/bases/${connector_name}/secrets" @@ -59,7 +63,7 @@ function save_all() { local connector_name=$(echo ${key} | cut -d'#' -f1) local cred_filename=$(echo ${key} | cut -d'#' -f2) local creds=${SECRET_MAP[${key}]} - _write_standard_creds ${connector_name} "${creds}" ${cred_filename} + _write_standard_creds ${connector_name} ${cred_filename} "${creds}" done return 0 @@ -88,7 +92,7 @@ function export_gsm_secrets(){ # docs: https://cloud.google.com/secret-manager/docs/filtering#api local filter="name:SECRET_" - [[ ${CONNECTOR_NAME} != "all" ]] && filter="${filter} AND labels.connector=${CONNECTOR_NAME}" + [[ ${CONNECTOR_NAME} != "all" ]] && filter="${filter} AND labels.connector:${CONNECTOR_NAME}" local uri="https://secretmanager.googleapis.com/v1/projects/${project_id}/secrets" local next_token='' while true; do @@ -105,18 +109,22 @@ function export_gsm_secrets(){ local secret_info=$(echo ${row} | base64 --decode) local secret_name=$(echo ${secret_info}| jq -r .name) local label_filename=$(echo ${secret_info}| jq -r '.labels.filename // "config"') - local label_connector=$(echo ${secret_info}| jq -r '.labels.connector // ""') + local label_connectors=$(echo ${secret_info}| jq -r '.labels.connector // ""') local label_command=$(echo ${secret_info}| jq -r ".labels.command // \"${COMMAND_NAME}\"") # skip secrets without the label "connector" - [[ -z ${label_connector} ]] && continue + [[ -z ${label_connectors} ]] && continue + if [[ "$label_connectors" != *"${CONNECTOR_NAME}"* ]]; then + echo "Not found ${CONNECTOR_NAME} info into the label 'connector' of the secret ${secret_name}" + continue + fi # skip secrets for other comments # all secrets without the "command" label will be added too [[ ${label_command} != ${COMMAND_NAME} ]] && continue # all secret file names should be finished with ".json" # but '.' cant be used local filename="${label_filename}.json" - echo "found the Google secret: ${secret_name} => ${filename} for the command '${label_command}'" + echo "found the Google secret of ${label_connectors}: ${secret_name} => ${filename} for the command '${label_command}'" local secret_uri="https://secretmanager.googleapis.com/v1/${secret_name}/versions/latest:access" local secret_data=$(curl -s --get --fail "${secret_uri}" \ --header "authorization: Bearer ${access_token}" \ @@ -125,7 +133,7 @@ function export_gsm_secrets(){ [[ -z ${secret_data} ]] && error "Can't load secrets' list" secret_data=$(echo ${secret_data} | jq -r '.payload.data // ""' | base64 -d) - write_standard_creds "${label_connector}" "${secret_data}" "${filename}" "gsm" + write_standard_creds "${CONNECTOR_NAME}" "${secret_data}" "${filename}" "gsm" done next_token=`echo ${data} | jq -r '.nextPageToken // ""'` [[ -z ${next_token} ]] && break From 0c0608099ea48a33de535295268fd8c2433e49c4 Mon Sep 17 00:00:00 2001 From: Maksym Pavlenok Date: Mon, 8 Nov 2021 10:13:01 +0200 Subject: [PATCH 15/36] Update tools/bin/ci_credentials.sh Co-authored-by: Sherif A. 
Nada --- tools/bin/ci_credentials.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh index 56db63223962d..f608567f4356a 100755 --- a/tools/bin/ci_credentials.sh +++ b/tools/bin/ci_credentials.sh @@ -118,7 +118,7 @@ function export_gsm_secrets(){ echo "Not found ${CONNECTOR_NAME} info into the label 'connector' of the secret ${secret_name}" continue fi - # skip secrets for other comments + # skip secrets for other commands # all secrets without the "command" label will be added too [[ ${label_command} != ${COMMAND_NAME} ]] && continue # all secret file names should be finished with ".json" From ecb0b2f2d986e37278c5a875ee7fac374af35755 Mon Sep 17 00:00:00 2001 From: Maksym Pavlenok Date: Mon, 8 Nov 2021 10:50:22 +0200 Subject: [PATCH 16/36] Update tools/bin/ci_credentials.sh Co-authored-by: Sherif A. Nada --- tools/bin/ci_credentials.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh index f608567f4356a..1dac0fdb3d77c 100755 --- a/tools/bin/ci_credentials.sh +++ b/tools/bin/ci_credentials.sh @@ -122,7 +122,7 @@ function export_gsm_secrets(){ # all secrets without the "command" label will be added too [[ ${label_command} != ${COMMAND_NAME} ]] && continue # all secret file names should be finished with ".json" - # but '.' cant be used + # but '.' cant be used in google, so we append it local filename="${label_filename}.json" echo "found the Google secret of ${label_connectors}: ${secret_name} => ${filename} for the command '${label_command}'" local secret_uri="https://secretmanager.googleapis.com/v1/${secret_name}/versions/latest:access" From 6a235b0b303bf801c917bca5b9f92fd2d4a4948f Mon Sep 17 00:00:00 2001 From: Maksym Pavlenok Date: Mon, 8 Nov 2021 11:09:41 +0200 Subject: [PATCH 17/36] Update tools/bin/ci_credentials.sh Co-authored-by: LiRen Tu --- tools/bin/ci_credentials.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh index 1dac0fdb3d77c..0ee0fd27437fe 100755 --- a/tools/bin/ci_credentials.sh +++ b/tools/bin/ci_credentials.sh @@ -29,7 +29,7 @@ function write_standard_creds() { return 0 fi local key="${connector_name}#${cred_filename}" - [[ -z "${creds}" ]] && error "Don't find data for the connector '${key})" + [[ -z "${creds}" ]] && error "Empty credential for the connector '${key})" if [ -v SECRET_MAP[${key}] ]; then echo "The connector '${key}' was added before" From c36fa8abf488cae5dc3b018ef4c2b8238b4110d3 Mon Sep 17 00:00:00 2001 From: antixar Date: Mon, 8 Nov 2021 11:58:10 +0200 Subject: [PATCH 18/36] update function names --- tools/bin/ci_credentials.sh | 27 ++++++++++++--------------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh index d3c21beb27411..37cd94ee1db77 100755 --- a/tools/bin/ci_credentials.sh +++ b/tools/bin/ci_credentials.sh @@ -5,10 +5,8 @@ set -e -COMMAND_NAME=$1 - # all secrets will be loaded if the second argument is not present -CONNECTOR_FULLNAME=${2:-all} +CONNECTOR_FULLNAME=${1:-all} CONNECTOR_NAME=`echo ${CONNECTOR_FULLNAME} | rev | cut -d'/' -f1 | rev` GSM_SCOPES="https://www.googleapis.com/auth/cloud-platform" @@ -29,7 +27,7 @@ function write_standard_creds() { return 0 fi local key="${connector_name}#${cred_filename}" - [[ -z "${creds}" ]] && error "Empty credential for the connector '${key})" + [[ -z "${creds}" ]] && error "Empty credential for the 
connector '${key} from ${source_name}" if [ -v SECRET_MAP[${key}] ]; then echo "The connector '${key}' was added before" @@ -40,7 +38,7 @@ function write_standard_creds() { return 0 } -function _write_standard_creds() { +function write_secret_to_disk() { local connector_name=$1 local cred_filename=$2 local creds=$3 @@ -58,12 +56,12 @@ function _write_standard_creds() { echo "$creds" > "${secrets_dir}/${cred_filename}" } -function save_all() { +function write_all_secrets() { for key in "${!SECRET_MAP[@]}"; do local connector_name=$(echo ${key} | cut -d'#' -f1) local cred_filename=$(echo ${key} | cut -d'#' -f2) local creds=${SECRET_MAP[${key}]} - _write_standard_creds ${connector_name} ${cred_filename} "${creds}" + write_secret_to_disk ${connector_name} ${cred_filename} "${creds}" done return 0 @@ -102,7 +100,9 @@ function export_gsm_secrets(){ --header "authorization: Bearer ${access_token}" \ --header "content-type: application/json" \ --header "x-goog-user-project: ${project_id}") - [[ -z ${data} ]] && error "Can't load secrets' list" + [[ -z ${data} ]] && error "Can't load secret for connector ${CONNECTOR_NAME}" + # GSM returns an empty JSON object if secrets are not found. + # It breaks JSON parsing by the 'jq' utility. The simplest fix is response normalization [[ ${data} == "{}" ]] && data='{"secrets": []}' for row in $(echo "${data}" | jq -r '.secrets[] | @base64'); do @@ -110,7 +110,6 @@ function export_gsm_secrets(){ local secret_name=$(echo ${secret_info}| jq -r .name) local label_filename=$(echo ${secret_info}| jq -r '.labels.filename // "config"') local label_connectors=$(echo ${secret_info}| jq -r '.labels.connector // ""') - local label_command=$(echo ${secret_info}| jq -r ".labels.command // \"${COMMAND_NAME}\"") # skip secrets without the label "connector" [[ -z ${label_connectors} ]] && continue @@ -118,13 +117,11 @@ function export_gsm_secrets(){ echo "Not found ${CONNECTOR_NAME} info into the label 'connector' of the secret ${secret_name}" continue fi - # skip secrets for other commands - # all secrets without the "command" label will be added too - [[ ${label_command} != ${COMMAND_NAME} ]] && continue + # all secret file names should be finished with ".json" # but '.' 
cant be used in google, so we append it local filename="${label_filename}.json" - echo "found the Google secret of ${label_connectors}: ${secret_name} => ${filename} for the command '${label_command}'" + echo "found the Google secret of ${label_connectors}: ${secret_name} => ${filename}" local secret_uri="https://secretmanager.googleapis.com/v1/${secret_name}/versions/latest:access" local secret_data=$(curl -s --get --fail "${secret_uri}" \ --header "authorization: Bearer ${access_token}" \ @@ -199,7 +196,7 @@ write_standard_creds source-file "$AZURE_STORAGE_INTEGRATION_TEST_CREDS" "azblob write_standard_creds source-file "$FILE_SECURE_HTTPS_TEST_CREDS" write_standard_creds source-file-secure "$FILE_SECURE_HTTPS_TEST_CREDS" write_standard_creds source-freshdesk "$FRESHDESK_TEST_CREDS" -write_standard_creds source-freshsales "$SOURCE_FRESHSALES_TEST_CREDS" +write_standard_creds source-freshsales "$SOURCE_FRESHSALES_TEST_CREDS" write_standard_creds source-freshservice "$SOURCE_FRESHSERVICE_TEST_CREDS" write_standard_creds source-facebook-marketing "$FACEBOOK_MARKETING_TEST_INTEGRATION_CREDS" write_standard_creds source-facebook-pages "$FACEBOOK_PAGES_INTEGRATION_TEST_CREDS" @@ -290,6 +287,6 @@ write_standard_creds source-zendesk-talk "$ZENDESK_TALK_TEST_CREDS" write_standard_creds source-zoom-singer "$ZOOM_INTEGRATION_TEST_CREDS" write_standard_creds source-zuora "$SOURCE_ZUORA_TEST_CREDS" -save_all +write_all_secrets exit $? From 0a1a090a706f192e65802e410c33b6f30fe17480 Mon Sep 17 00:00:00 2001 From: antixar Date: Mon, 8 Nov 2021 12:28:09 +0200 Subject: [PATCH 19/36] update docs --- docs/connector-development/README.md | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/docs/connector-development/README.md b/docs/connector-development/README.md index ebf9705e57843..6e8e01034413e 100644 --- a/docs/connector-development/README.md +++ b/docs/connector-development/README.md @@ -130,10 +130,15 @@ Once you've finished iterating on the changes to a connector as specified in its ## Using credentials in CI In order to run integration tests in CI, you'll often need to inject credentials into CI. There are a few steps for doing this: - -1. **Place the credentials into Lastpass**: Airbyte uses a shared Lastpass account as the source of truth for all secrets. Place the credentials **exactly as they should be used by the connector** into a secure note i.e: it should basically be a copy paste of the `config.json` passed into a connector via the `--config` flag. We use the following naming pattern: ` creds` e.g: `source google adwords creds` or `destination snowflake creds`. -2. **Add the credentials to Github Secrets**: To inject credentials into a CI workflow, the first step is to add it to Github Secrets, specifically within the ["more-secrets" environment](https://github.com/airbytehq/airbyte/settings/environments/276695501/edit). Admin access to the Airbyte repo is required to do this. All Airbyte engineers have admin access and should be able to do this themselves. External contributors or contractors will need to request this from their team lead or project manager who should have admin access. Follow the same naming pattern as all the other secrets e.g: if you are placing credentials for source google adwords, name the secret `SOURCE_GOOGLE_ADWORDS_CREDS`. After doing this step, the secret will be available in the relevant Github workflows using the workflow secrets syntax. -3. 
**Inject the credentials into test and publish CI workflows**: edit the files `.github/workflows/publish-command.yml` and `.github/workflows/test-command.yml` to inject the secret into the CI run. This will make these secrets available to the `/test` and `/publish` commands. -4. **During CI, write the secret from env variables to the connector directory**: edit `tools/bin/ci_credentials.sh` to write the secret into the `secrets/` directory of the relevant connector. +1. **Place the credentials into Google Secret Manager (GSM)**: Airbyte uses the Google Secret Manager service as the source of truth for all secrets. Place the credentials **exactly as they should be used by the connector** into a GSM secret, i.e. it should be a copy-paste of the `config.json` passed into a connector via the `--config` flag. We use the following naming pattern: `SECRET_<CONNECTOR-NAME>_CREDS`, e.g.: `SECRET_SOURCE-S3_CREDS` or `SECRET_DESTINATION-SNOWFLAKE_CREDS`. Access to the GSM storage is limited. Each developer should have the `Development_CI_Secrets` role in the `dataline-integration-testing` project. +2. **Add labels to the GSM secret** (a hypothetical `gcloud` sketch follows this patch): + * `connector` (required) -- a unique connector name, or several connector names delimited with '_', e.g.: `connector=source-s3`, `connector=destination-snowflake` + * `filename` (optional) -- a custom target secret file. Unfortunately Google doesn't allow '.' in label values, so the Airbyte CI scripts append '.json' automatically. By default secrets are saved to `./secrets/config.json`, e.g.: `filename=config_auth` => `secrets/config_auth.json` +3. That should be it. + +#### How to migrate to the new secrets logic: +1. Create all necessary secrets as explained above. +2. Remove the connector's old GitHub secret lines from `tools/bin/ci_credentials.sh`. +3. Remove all old secrets from GitHub. 4. That should be it.
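(A hypothetical `gcloud` sketch of the secret registration described in the docs above — illustration only, not part of any patch. The connector name and file path are assumed examples; the project ID is the one named in the docs.)

```bash
# Create a secret the CI script can discover: the name follows SECRET_<CONNECTOR-NAME>_CREDS,
# and the labels drive the connector match and target filename (".json" is appended by CI).
gcloud secrets create SECRET_SOURCE-S3_CREDS \
  --project=dataline-integration-testing \
  --replication-policy=automatic \
  --labels=connector=source-s3,filename=config \
  --data-file=secrets/config.json

# Rotating credentials later is just a new version; CI always reads "versions/latest".
gcloud secrets versions add SECRET_SOURCE-S3_CREDS \
  --project=dataline-integration-testing \
  --data-file=secrets/config.json
```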
From ef5585c39fc0dee1502c24c7844fff1f5146de38 Mon Sep 17 00:00:00 2001 From: antixar Date: Mon, 8 Nov 2021 12:31:24 +0200 Subject: [PATCH 20/36] update docs --- .github/workflows/publish-command.yml | 2 +- .github/workflows/test-command.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish-command.yml b/.github/workflows/publish-command.yml index 27fcf07841531..bdffe002ae456 100644 --- a/.github/workflows/publish-command.yml +++ b/.github/workflows/publish-command.yml @@ -70,7 +70,7 @@ jobs: - name: Install Pyenv run: python3 -m pip install virtualenv==16.7.9 --user - name: Write Integration Test Credentials # TODO DRY this with test-command.yml - run: ./tools/bin/ci_credentials.sh publish ${{ github.event.inputs.connector }} + run: ./tools/bin/ci_credentials.sh ${{ github.event.inputs.connector }} env: SECRETS_JSON: ${{ toJson(secrets) }} GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }} diff --git a/.github/workflows/test-command.yml b/.github/workflows/test-command.yml index 559da9a1cea69..1b05f0e974c57 100644 --- a/.github/workflows/test-command.yml +++ b/.github/workflows/test-command.yml @@ -65,7 +65,7 @@ jobs: - name: Install Pyenv run: python3 -m pip install virtualenv==16.7.9 --user - name: Write Integration Test Credentials - run: ./tools/bin/ci_credentials.sh test ${{ github.event.inputs.connector }} + run: ./tools/bin/ci_credentials.sh ${{ github.event.inputs.connector }} env: SECRETS_JSON: ${{ toJson(secrets) }} GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }} From f9c61f5b73835efab4a9d4532af09ac710084924 Mon Sep 17 00:00:00 2001 From: antixar Date: Mon, 8 Nov 2021 12:41:54 +0200 Subject: [PATCH 21/36] reset failed changes --- .../source-zendesk-support/Dockerfile | 2 +- .../acceptance-test-config.yml | 3 - .../schemas/ticket_metrics.json | 2 +- .../source_zendesk_support/source.py | 2 - .../source_zendesk_support/spec.json | 65 +------------------ .../source_zendesk_support/streams.py | 47 +++++--------- docs/integrations/sources/zendesk-support.md | 33 +++++----- 7 files changed, 37 insertions(+), 117 deletions(-) diff --git a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile index 42475d2905f52..f44e3e602d74d 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile +++ b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile @@ -25,5 +25,5 @@ COPY source_zendesk_support ./source_zendesk_support ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.3 LABEL io.airbyte.name=airbyte/source-zendesk-support diff --git a/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml index b708da780ae85..61da7ab6b6429 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml @@ -7,13 +7,10 @@ tests: connection: - config_path: "secrets/config.json" status: "succeed" - - config_path: "secrets/config_oauth.json" - status: "succeed" - config_path: "integration_tests/invalid_config.json" status: "failed" discovery: - config_path: "secrets/config.json" - - config_path: "secrets/config_oauth.json" basic_read: - config_path: "secrets/config.json" 
configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/ticket_metrics.json b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/ticket_metrics.json index a139c863d2b91..454ab85dffc8a 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/ticket_metrics.json +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/ticket_metrics.json @@ -132,7 +132,7 @@ }, "initially_assigned_at": { "type": ["null", "string"], - "format": "date-time" + "format": "datetime" }, "assigned_at": { "type": ["null", "string"], diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py index 5c0e8070d7ced..d7f986ad46416 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py @@ -29,8 +29,6 @@ UserSettingsStream, ) -# from airbyte_cdk.sources.streams.http.auth.token import TokenAuthenticator - class BasicApiTokenAuthenticator(TokenAuthenticator): """basic Authorization header""" diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/spec.json b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/spec.json index f08fb1725826b..5bd782541f0f5 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/spec.json +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/spec.json @@ -4,19 +4,13 @@ "$schema": "http://json-schema.org/draft-07/schema#", "title": "Source Zendesk Support Spec", "type": "object", - "required": [ - "start_date", - "subdomain", - "auth_method" - ], + "required": ["start_date", "subdomain", "auth_method"], "additionalProperties": false, "properties": { "start_date": { "type": "string", "description": "The date from which you'd like to replicate data for Zendesk Support API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.", - "examples": [ - "2020-10-15T00:00:00Z" - ], + "examples": ["2020-10-15T00:00:00Z"], "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" }, "subdomain": { @@ -29,41 +23,10 @@ "default": "api_token", "description": "Zendesk service provides 2 auth method: API token and oAuth2. Now only the first one is available. Another one will be added in the future", "oneOf": [ - { - "type": "object", - "title": "OAuth2.0 authorization", - "properties": { - "auth_method": { - "type": "string", - "const": "access_token" - }, - "client_id": { - "type": "string", - "description": "This is the name of your client for use in code." - }, - "client_secret": { - "type": "string", - "airbyte_secret": true, - "description": "This secret token is used by apps redirecting to your client. " - }, - "access_token": { - "type": "string", - "airbyte_secret": true, - "description": "Using for authorization. The access token doesn't expire." 
- } - }, - "required": [ - "access_token" - ], - "additionalProperties": false - }, { "title": "API Token", "type": "object", - "required": [ - "email", - "api_token" - ], + "required": ["email", "api_token"], "additionalProperties": false, "properties": { "auth_method": { @@ -84,27 +47,5 @@ ] } } - }, - "authSpecification": { - "auth_type": "oauth2.0", - "oauth2Specification": { - "rootObject": [ - "auth_method", - 0 - ], - "oauthFlowInitParameters": [ - [ - "client_id" - ], - [ - "client_secret" - ] - ], - "oauthFlowOutputParameters": [ - [ - "access_token" - ] - ] - } } } diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py index a6f3021e65bfb..900b682e17e77 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py @@ -124,8 +124,7 @@ class IncrementalEntityStream(SourceZendeskSupportStream, ABC): def __init__(self, start_date: str, **kwargs): super().__init__(**kwargs) # add the custom value for skiping of not relevant records - self._start_date = self.str2datetime( - start_date) if isinstance(start_date, str) else start_date + self._start_date = self.str2datetime(start_date) if isinstance(start_date, str) else start_date # Flag for marking of completed process self._finished = False @@ -153,8 +152,7 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: # try to save maximum value of a cursor field - old_value = str((current_stream_state or {} - ).get(self.cursor_field, "")) + old_value = str((current_stream_state or {}).get(self.cursor_field, "")) new_value = str((latest_record or {}).get(self.cursor_field, "")) return {self.cursor_field: max(new_value, old_value)} @@ -214,8 +212,7 @@ def request_params( current_state = self.str2unixtime(current_state) elif not self.last_end_time: self.last_end_time = current_state - start_time = int(current_state or time.mktime( - self._start_date.timetuple())) + start_time = int(current_state or time.mktime(self._start_date.timetuple())) # +1 because the API returns all records where generated_timestamp >= start_time now = calendar.timegm(datetime.now().utctimetuple()) @@ -231,8 +228,7 @@ def request_params( def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: # try to save maximum value of a cursor field - state = super().get_updated_state( - current_stream_state=current_stream_state, latest_record=latest_record) + state = super().get_updated_state(current_stream_state=current_stream_state, latest_record=latest_record) if self.last_end_time: state[LAST_END_TIME_KEY] = self.last_end_time @@ -336,8 +332,7 @@ class IncrementalSortedCursorStream(IncrementalUnsortedStream, ABC): def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: params = super().request_params(next_page_token=next_page_token, **kwargs) - params.update({"sort_by": self.cursor_field, - "sort_order": "desc", "limit": self.page_size}) + params.update({"sort_by": self.cursor_field, "sort_order": "desc", "limit": self.page_size}) if next_page_token: params["cursor"] = next_page_token @@ -355,8 +350,7 @@ class 
IncrementalSortedPageStream(IncrementalUnsortedPageStream, ABC): def request_params(self, **kwargs) -> MutableMapping[str, Any]: params = super().request_params(**kwargs) if params: - params.update({"sort_by": self.cursor_field, - "sort_order": "desc", "limit": self.page_size}) + params.update({"sort_by": self.cursor_field, "sort_order": "desc", "limit": self.page_size}) return params @@ -396,19 +390,16 @@ def stream_slices( ticket_stream_value = stream_state.get(Tickets.cursor_field) if not ticket_stream_value: # for backward compatibility because not all relevant states can have some last ticket state - ticket_stream_value = self.str2unixtime( - stream_state.get(self.cursor_field)) + ticket_stream_value = self.str2unixtime(stream_state.get(self.cursor_field)) - tickets_stream = Tickets( - start_date=self._start_date, subdomain=self._subdomain, authenticator=self.authenticator) + tickets_stream = Tickets(start_date=self._start_date, subdomain=self._subdomain, authenticator=self.authenticator) ticket_pages = defaultdict(list) last_end_time = stream_state.get(LAST_END_TIME_KEY, 0) ticket_count = 0 for ticket in tickets_stream.read_records( sync_mode=sync_mode, cursor_field=cursor_field, - stream_state={Tickets.cursor_field: ticket_stream_value, - LAST_END_TIME_KEY: last_end_time}, + stream_state={Tickets.cursor_field: ticket_stream_value, LAST_END_TIME_KEY: last_end_time}, ): if not ticket["comment_count"]: # skip tickets without comments @@ -427,11 +418,10 @@ def stream_slices( # the addl stream state fields "_last_end_time" and its value is not compatible # with comments' cursor fields. Thus we need to save it separately and add # last_end_time info for every slice - last_page = {last_times[-1] : [ticket_pages[last_times[-1]].pop(-1)]} + last_page = {last_times[-1]: [ticket_pages[last_times[-1]].pop(-1)]} new_last_times = [last_end_time] + last_times[:-1] - ticket_pages = { - new_last_times[i]: ticket_pages[last_times[i]] for i in range(len(last_times))} + ticket_pages = {new_last_times[i]: ticket_pages[last_times[i]] for i in range(len(last_times))} ticket_pages.update(last_page) self.logger.info(f"Found {ticket_count} ticket(s) with comments") @@ -441,8 +431,7 @@ def stream_slices( def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: """Adds a last cursor ticket updated time for a comment state""" - new_state = super().get_updated_state( - current_stream_state=current_stream_state, latest_record=latest_record) + new_state = super().get_updated_state(current_stream_state=current_stream_state, latest_record=latest_record) if self._ticket_last_end_time: new_state[LAST_END_TIME_KEY] = self._ticket_last_end_time @@ -457,8 +446,7 @@ def parse_response( elif response.status_code == 404: ticket_id = stream_slice["id"] # skip 404 errors for not found tickets - self.logger.info( - f"ticket {ticket_id} not found (404 error). It could have been deleted.") + self.logger.info(f"ticket {ticket_id} not found (404 error). 
It could have been deleted.") else: response.raise_for_status() @@ -497,8 +485,7 @@ def request_params(self, **kwargs) -> MutableMapping[str, Any]: def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: """Need to save a cursor values as integer""" - state = super().get_updated_state( - current_stream_state=current_stream_state, latest_record=latest_record) + state = super().get_updated_state(current_stream_state=current_stream_state, latest_record=latest_record) if state and state.get(self.cursor_field): state[self.cursor_field] = int(state[self.cursor_field]) return state @@ -529,10 +516,8 @@ def request_params( self, stream_state: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, **kwargs ) -> MutableMapping[str, Any]: """Adds the filtering field 'start_time'""" - params = super().request_params(stream_state=stream_state, - next_page_token=next_page_token, **kwargs) - start_time = self.str2unixtime( - (stream_state or {}).get(self.cursor_field)) + params = super().request_params(stream_state=stream_state, next_page_token=next_page_token, **kwargs) + start_time = self.str2unixtime((stream_state or {}).get(self.cursor_field)) if not start_time: start_time = int(time.mktime(self._start_date.timetuple())) diff --git a/docs/integrations/sources/zendesk-support.md b/docs/integrations/sources/zendesk-support.md index 58bed1104177b..598e77acb1321 100644 --- a/docs/integrations/sources/zendesk-support.md +++ b/docs/integrations/sources/zendesk-support.md @@ -55,20 +55,20 @@ This Source is capable of syncing the following core Streams: ### Data type mapping | Integration Type | Airbyte Type | Notes | -| :--------------- | :----------- | :---- | -| `string` | `string` | | -| `number` | `number` | | -| `array` | `array` | | -| `object` | `object` | | +| :--- | :--- | :--- | +| `string` | `string` | | +| `number` | `number` | | +| `array` | `array` | | +| `object` | `object` | | ### Features -| Feature | Supported?\(Yes/No\) | Notes | -| :----------------------------------- | :------------------- | :--------------------------------------- | -| Full Refresh Sync | Yes | | -| Incremental - Append Sync | Yes | | -| Incremental - Debuped + History Sync | Yes | Enabled according to type of destination | -| Namespaces | No | | +| Feature | Supported?\(Yes/No\) | Notes | +| :--- | :--- | :--- | +| Full Refresh Sync | Yes | | +| Incremental - Append Sync | Yes | | +| Incremental - Debuped + History Sync | Yes | Enabled according to type of destination | +| Namespaces | No | | ### Performance considerations @@ -95,11 +95,10 @@ We recommend creating a restricted, read-only key specifically for Airbyte acces ### CHANGELOG -| Version | Date | Pull Request | Subject | -| :------ | :--------- | :----------------------------------------------------- | :----------------------------------------------------- | -| `0.1.4` | 2021-10-26 | [7377](https://github.com/airbytehq/airbyte/pull/7377) | fix initially_assigned_at type in ticket metrics | -| `0.1.3` | 2021-10-17 | [7097](https://github.com/airbytehq/airbyte/pull/7097) | correction of spec file | -| `0.1.2` | 2021-10-16 | [6513](https://github.com/airbytehq/airbyte/pull/6513) | fixed comments stream | +| Version | Date | Pull Request | Subject | +| :------ | :-------- | :----- | :------ | +| `0.1.3` | 2021-10-17 | [7097](https://github.com/airbytehq/airbyte/pull/7097) | correction of spec file | +| `0.1.2` | 2021-10-16 | 
[6513](https://github.com/airbytehq/airbyte/pull/6513) | fixed comments stream | | `0.1.1` | 2021-09-02 | [5787](https://github.com/airbytehq/airbyte/pull/5787) | fixed incremental logic for the ticket_comments stream | -| `0.1.0` | 2021-07-21 | [4861](https://github.com/airbytehq/airbyte/pull/4861) | created CDK native zendesk connector | +| `0.1.0` | 2021-07-21 | [4861](https://github.com/airbytehq/airbyte/pull/4861) | created CDK native zendesk connector | From 5cb0ea951a1d73ca0c78a0a8ef76d36d65807cc3 Mon Sep 17 00:00:00 2001 From: antixar Date: Mon, 8 Nov 2021 12:44:04 +0200 Subject: [PATCH 22/36] reset failed changes --- .../resources/seed/source_definitions.yaml | 84 ++++++------------- 1 file changed, 24 insertions(+), 60 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 588d12bb6245f..b671b808ff65f 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -102,12 +102,6 @@ dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/sources/cockroachdb sourceType: database -- name: Delighted - sourceDefinitionId: cc88c43f-6f53-4e8a-8c4d-b284baaf9635 - dockerRepository: airbyte/source-delighted - dockerImageTag: 0.1.0 - documentationUrl: https://docs.airbyte.io/integrations/sources/delighted - sourceType: api - name: Dixa sourceDefinitionId: 0b5c867e-1b12-4d02-ab74-97b2184ff6d7 dockerRepository: airbyte/source-dixa @@ -131,14 +125,14 @@ - name: Facebook Marketing sourceDefinitionId: e7778cfc-e97c-4458-9ecb-b4f2bba8946c dockerRepository: airbyte/source-facebook-marketing - dockerImageTag: 0.2.22 + dockerImageTag: 0.2.21 documentationUrl: https://docs.airbyte.io/integrations/sources/facebook-marketing icon: facebook.svg sourceType: api - name: Facebook Pages sourceDefinitionId: 010eb12f-837b-4685-892d-0a39f76a98f5 dockerRepository: airbyte/source-facebook-pages - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.2 documentationUrl: https://hub.docker.com/r/airbyte/source-facebook-pages icon: facebook.svg sourceType: api @@ -156,12 +150,6 @@ documentationUrl: https://docs.airbyte.io/integrations/sources/freshdesk icon: freshdesk.svg sourceType: api -- name: Freshsales - sourceDefinitionId: eca08d79-7b92-4065-b7f3-79c14836ebe7 - dockerRepository: airbyte/source-freshsales - dockerImageTag: 0.1.0 - documentationUrl: https://docs.airbyte.io/integrations/sources/freshsales - sourceType: api - name: Freshservice sourceDefinitionId: 9bb85338-ea95-4c93-b267-6be89125b267 dockerRepository: airbyte/source-freshservice @@ -198,7 +186,7 @@ - name: Google Directory sourceDefinitionId: d19ae824-e289-4b14-995a-0632eb46d246 dockerRepository: airbyte/source-google-directory - dockerImageTag: 0.1.8 + dockerImageTag: 0.1.5 documentationUrl: https://docs.airbyte.io/integrations/sources/google-directory sourceType: api - name: Google Search Console @@ -217,13 +205,13 @@ - name: Google Workspace Admin Reports sourceDefinitionId: ed9dfefa-1bbc-419d-8c5e-4d78f0ef6734 dockerRepository: airbyte/source-google-workspace-admin-reports - dockerImageTag: 0.1.5 + dockerImageTag: 0.1.4 documentationUrl: https://docs.airbyte.io/integrations/sources/google-workspace-admin-reports sourceType: api - name: Greenhouse sourceDefinitionId: 59f1e50a-331f-4f09-b3e8-2e8d4d355f44 dockerRepository: airbyte/source-greenhouse - dockerImageTag: 0.2.6 + dockerImageTag: 0.2.5 documentationUrl: 
https://docs.airbyte.io/integrations/sources/greenhouse icon: greenhouse.svg sourceType: api @@ -236,7 +224,7 @@ - name: Hubspot sourceDefinitionId: 36c891d9-4bd9-43ac-bad2-10e12756272c dockerRepository: airbyte/source-hubspot - dockerImageTag: 0.1.22 + dockerImageTag: 0.1.21 documentationUrl: https://docs.airbyte.io/integrations/sources/hubspot icon: hubspot.svg sourceType: api @@ -262,13 +250,13 @@ - name: Iterable sourceDefinitionId: 2e875208-0c0b-4ee4-9e92-1cb3156ea799 dockerRepository: airbyte/source-iterable - dockerImageTag: 0.1.11 + dockerImageTag: 0.1.9 documentationUrl: https://docs.airbyte.io/integrations/sources/iterable sourceType: api - name: Jira sourceDefinitionId: 68e63de2-bb83-4c7e-93fa-a8a9051e3993 dockerRepository: airbyte/source-jira - dockerImageTag: 0.2.14 + dockerImageTag: 0.2.13 documentationUrl: https://docs.airbyte.io/integrations/sources/jira icon: jira.svg sourceType: api @@ -334,16 +322,10 @@ - name: Mixpanel sourceDefinitionId: 12928b32-bf0a-4f1e-964f-07e12e37153a dockerRepository: airbyte/source-mixpanel - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.1 documentationUrl: https://docs.airbyte.io/integrations/sources/mixpanel icon: mixpanel.svg sourceType: api -- name: Monday - sourceDefinitionId: 80a54ea2-9959-4040-aac1-eee42423ec9b - dockerRepository: airbyte/source-monday - dockerImageTag: 0.1.0 - documentationUrl: https://docs.airbyte.io/integrations/sources/monday - sourceType: api - name: MongoDb sourceDefinitionId: b2e713cd-cc36-4c0a-b5bd-b47cb8a0561e dockerRepository: airbyte/source-mongodb-v2 @@ -354,14 +336,14 @@ - name: MySQL sourceDefinitionId: 435bb9a5-7887-4809-aa58-28c27df0d7ad dockerRepository: airbyte/source-mysql - dockerImageTag: 0.4.9 + dockerImageTag: 0.4.8 documentationUrl: https://docs.airbyte.io/integrations/sources/mysql icon: mysql.svg sourceType: database - name: Okta sourceDefinitionId: 1d4fdb25-64fc-4569-92da-fcdca79a8372 dockerRepository: airbyte/source-okta - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/sources/okta sourceType: api - name: OneSignal @@ -382,13 +364,6 @@ dockerImageTag: 0.1.1 documentationUrl: https://docs.airbyte.io/integrations/sources/paypal-transaction sourceType: api -- name: Paystack - sourceDefinitionId: 193bdcb8-1dd9-48d1-aade-91cadfd74f9b - dockerRepository: airbyte/source-paystack - dockerImageTag: 0.1.0 - documentationUrl: https://docs.airbyte.io/integrations/sources/paystack - icon: paystack.svg - sourceType: api - name: Pipedrive sourceDefinitionId: d8286229-c680-4063-8c59-23b9b391c700 dockerRepository: airbyte/source-pipedrive @@ -417,15 +392,15 @@ - name: Postgres sourceDefinitionId: decd338e-5647-4c0b-adf4-da0e75f5a750 dockerRepository: airbyte/source-postgres - dockerImageTag: 0.3.13 + dockerImageTag: 0.3.11 documentationUrl: https://docs.airbyte.io/integrations/sources/postgres icon: postgresql.svg sourceType: database -- sourceDefinitionId: c8630570-086d-4a40-99ae-ea5b18673071 - name: Zendesk Talk - dockerRepository: airbyte/source-zendesk-talk - dockerImageTag: 0.1.3 - documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-talk +- name: Prestashop + sourceDefinitionId: d60a46d4-709f-4092-a6b7-2457f7d455f5 + dockerRepository: airbyte/source-prestashop + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/presta-shop sourceType: api - name: Quickbooks sourceDefinitionId: 29b409d9-30a5-4cc8-ad50-886eb846fea3 @@ -436,7 +411,7 @@ - name: Recharge sourceDefinitionId: 
45d2e135-2ede-49e1-939f-3e3ec357a65e dockerRepository: airbyte/source-recharge - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.3 documentationUrl: https://docs.airbyte.io/integrations/sources/recharge sourceType: api - name: Recurly @@ -453,12 +428,6 @@ documentationUrl: https://docs.airbyte.io/integrations/sources/redshift icon: redshift.svg sourceType: database -- name: Retently - sourceDefinitionId: db04ecd1-42e7-4115-9cec-95812905c626 - dockerRepository: airbyte/source-retently - dockerImageTag: 0.1.0 - documentationUrl: https://docs.airbyte.io/integrations/sources/retently - sourceType: api - name: S3 sourceDefinitionId: 69589781-7828-43c5-9f63-8925b1c1ccc2 dockerRepository: airbyte/source-s3 @@ -474,7 +443,7 @@ - name: Salesforce sourceDefinitionId: b117307c-14b6-41aa-9422-947e34922962 dockerRepository: airbyte/source-salesforce - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/sources/salesforce icon: salesforce.svg sourceType: api @@ -488,7 +457,7 @@ - name: Shopify sourceDefinitionId: 9da77001-af33-4bcd-be46-6252bf9342b9 dockerRepository: airbyte/source-shopify - dockerImageTag: 0.1.22 + dockerImageTag: 0.1.21 documentationUrl: https://docs.airbyte.io/integrations/sources/shopify sourceType: api - name: Short.io @@ -536,14 +505,14 @@ - name: Stripe sourceDefinitionId: e094cb9a-26de-4645-8761-65c0c425d1de dockerRepository: airbyte/source-stripe - dockerImageTag: 0.1.22 + dockerImageTag: 0.1.21 documentationUrl: https://docs.airbyte.io/integrations/sources/stripe icon: stripe.svg sourceType: api - name: Survey Monkey sourceDefinitionId: badc5925-0485-42be-8caa-b34096cb71b5 dockerRepository: airbyte/source-surveymonkey - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.0 documentationUrl: https://docs.airbyte.io/integrations/sources/surveymonkey sourceType: api - name: Tempo @@ -585,7 +554,7 @@ - name: Zendesk Chat sourceDefinitionId: 40d24d0f-b8f9-4fe0-9e6c-b06c0f3f45e4 dockerRepository: airbyte/source-zendesk-chat - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-chat icon: zendesk.svg sourceType: api @@ -598,7 +567,7 @@ - name: Zendesk Support sourceDefinitionId: 79c1aa37-dae3-42ae-b333-d1c105477715 dockerRepository: airbyte/source-zendesk-support - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.3 documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-support icon: zendesk.svg sourceType: api @@ -608,11 +577,6 @@ dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-talk sourceType: api -- sourceDefinitionId: cdaf146a-9b75-49fd-9dd2-9d64a0bb4781 - name: Sentry - dockerRepository: airbyte/source-sentry - dockerImageTag: 0.1.0 - documentationUrl: https://docs.airbyte.io/integrations/sources/sentry - name: Zoom sourceDefinitionId: aea2fd0d-377d-465e-86c0-4fdc4f688e51 dockerRepository: airbyte/source-zoom-singer From f4746b7e736918e23cae89f552f53dbd4bd40e47 Mon Sep 17 00:00:00 2001 From: antixar Date: Mon, 8 Nov 2021 12:45:02 +0200 Subject: [PATCH 23/36] reset failed changes --- .../connectors/source-zendesk-support/Dockerfile | 2 +- .../source_zendesk_support/schemas/ticket_metrics.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile index f44e3e602d74d..42475d2905f52 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile +++ 
b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile @@ -25,5 +25,5 @@ COPY source_zendesk_support ./source_zendesk_support ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.3 +LABEL io.airbyte.version=0.1.4 LABEL io.airbyte.name=airbyte/source-zendesk-support diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/ticket_metrics.json b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/ticket_metrics.json index 454ab85dffc8a..a139c863d2b91 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/ticket_metrics.json +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/ticket_metrics.json @@ -132,7 +132,7 @@ }, "initially_assigned_at": { "type": ["null", "string"], - "format": "datetime" + "format": "date-time" }, "assigned_at": { "type": ["null", "string"], From 427ab95b93f5b89b64d85d129d51071be15f51c4 Mon Sep 17 00:00:00 2001 From: antixar Date: Mon, 8 Nov 2021 12:47:01 +0200 Subject: [PATCH 24/36] reset failed changes --- .../resources/seed/source_definitions.yaml | 74 ++++++++++++++----- docs/integrations/sources/zendesk-support.md | 1 + 2 files changed, 56 insertions(+), 19 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index b671b808ff65f..a842e98bb00ac 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -102,6 +102,12 @@ dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/sources/cockroachdb sourceType: database +- name: Delighted + sourceDefinitionId: cc88c43f-6f53-4e8a-8c4d-b284baaf9635 + dockerRepository: airbyte/source-delighted + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/delighted + sourceType: api - name: Dixa sourceDefinitionId: 0b5c867e-1b12-4d02-ab74-97b2184ff6d7 dockerRepository: airbyte/source-dixa @@ -125,14 +131,14 @@ - name: Facebook Marketing sourceDefinitionId: e7778cfc-e97c-4458-9ecb-b4f2bba8946c dockerRepository: airbyte/source-facebook-marketing - dockerImageTag: 0.2.21 + dockerImageTag: 0.2.22 documentationUrl: https://docs.airbyte.io/integrations/sources/facebook-marketing icon: facebook.svg sourceType: api - name: Facebook Pages sourceDefinitionId: 010eb12f-837b-4685-892d-0a39f76a98f5 dockerRepository: airbyte/source-facebook-pages - dockerImageTag: 0.1.2 + dockerImageTag: 0.1.3 documentationUrl: https://hub.docker.com/r/airbyte/source-facebook-pages icon: facebook.svg sourceType: api @@ -150,6 +156,12 @@ documentationUrl: https://docs.airbyte.io/integrations/sources/freshdesk icon: freshdesk.svg sourceType: api +- name: Freshsales + sourceDefinitionId: eca08d79-7b92-4065-b7f3-79c14836ebe7 + dockerRepository: airbyte/source-freshsales + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/freshsales + sourceType: api - name: Freshservice sourceDefinitionId: 9bb85338-ea95-4c93-b267-6be89125b267 dockerRepository: airbyte/source-freshservice @@ -186,7 +198,7 @@ - name: Google Directory sourceDefinitionId: d19ae824-e289-4b14-995a-0632eb46d246 dockerRepository: airbyte/source-google-directory - dockerImageTag: 0.1.5 + dockerImageTag: 0.1.8 documentationUrl: 
https://docs.airbyte.io/integrations/sources/google-directory sourceType: api - name: Google Search Console @@ -205,13 +217,13 @@ - name: Google Workspace Admin Reports sourceDefinitionId: ed9dfefa-1bbc-419d-8c5e-4d78f0ef6734 dockerRepository: airbyte/source-google-workspace-admin-reports - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.5 documentationUrl: https://docs.airbyte.io/integrations/sources/google-workspace-admin-reports sourceType: api - name: Greenhouse sourceDefinitionId: 59f1e50a-331f-4f09-b3e8-2e8d4d355f44 dockerRepository: airbyte/source-greenhouse - dockerImageTag: 0.2.5 + dockerImageTag: 0.2.6 documentationUrl: https://docs.airbyte.io/integrations/sources/greenhouse icon: greenhouse.svg sourceType: api @@ -224,7 +236,7 @@ - name: Hubspot sourceDefinitionId: 36c891d9-4bd9-43ac-bad2-10e12756272c dockerRepository: airbyte/source-hubspot - dockerImageTag: 0.1.21 + dockerImageTag: 0.1.22 documentationUrl: https://docs.airbyte.io/integrations/sources/hubspot icon: hubspot.svg sourceType: api @@ -250,13 +262,13 @@ - name: Iterable sourceDefinitionId: 2e875208-0c0b-4ee4-9e92-1cb3156ea799 dockerRepository: airbyte/source-iterable - dockerImageTag: 0.1.9 + dockerImageTag: 0.1.11 documentationUrl: https://docs.airbyte.io/integrations/sources/iterable sourceType: api - name: Jira sourceDefinitionId: 68e63de2-bb83-4c7e-93fa-a8a9051e3993 dockerRepository: airbyte/source-jira - dockerImageTag: 0.2.13 + dockerImageTag: 0.2.14 documentationUrl: https://docs.airbyte.io/integrations/sources/jira icon: jira.svg sourceType: api @@ -322,10 +334,16 @@ - name: Mixpanel sourceDefinitionId: 12928b32-bf0a-4f1e-964f-07e12e37153a dockerRepository: airbyte/source-mixpanel - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.3 documentationUrl: https://docs.airbyte.io/integrations/sources/mixpanel icon: mixpanel.svg sourceType: api +- name: Monday + sourceDefinitionId: 80a54ea2-9959-4040-aac1-eee42423ec9b + dockerRepository: airbyte/source-monday + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/monday + sourceType: api - name: MongoDb sourceDefinitionId: b2e713cd-cc36-4c0a-b5bd-b47cb8a0561e dockerRepository: airbyte/source-mongodb-v2 @@ -336,14 +354,14 @@ - name: MySQL sourceDefinitionId: 435bb9a5-7887-4809-aa58-28c27df0d7ad dockerRepository: airbyte/source-mysql - dockerImageTag: 0.4.8 + dockerImageTag: 0.4.9 documentationUrl: https://docs.airbyte.io/integrations/sources/mysql icon: mysql.svg sourceType: database - name: Okta sourceDefinitionId: 1d4fdb25-64fc-4569-92da-fcdca79a8372 dockerRepository: airbyte/source-okta - dockerImageTag: 0.1.2 + dockerImageTag: 0.1.4 documentationUrl: https://docs.airbyte.io/integrations/sources/okta sourceType: api - name: OneSignal @@ -364,6 +382,13 @@ dockerImageTag: 0.1.1 documentationUrl: https://docs.airbyte.io/integrations/sources/paypal-transaction sourceType: api +- name: Paystack + sourceDefinitionId: 193bdcb8-1dd9-48d1-aade-91cadfd74f9b + dockerRepository: airbyte/source-paystack + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/paystack + icon: paystack.svg + sourceType: api - name: Pipedrive sourceDefinitionId: d8286229-c680-4063-8c59-23b9b391c700 dockerRepository: airbyte/source-pipedrive @@ -392,7 +417,7 @@ - name: Postgres sourceDefinitionId: decd338e-5647-4c0b-adf4-da0e75f5a750 dockerRepository: airbyte/source-postgres - dockerImageTag: 0.3.11 + dockerImageTag: 0.3.13 documentationUrl: https://docs.airbyte.io/integrations/sources/postgres icon: postgresql.svg sourceType: database @@ 
-411,7 +436,7 @@ - name: Recharge sourceDefinitionId: 45d2e135-2ede-49e1-939f-3e3ec357a65e dockerRepository: airbyte/source-recharge - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.4 documentationUrl: https://docs.airbyte.io/integrations/sources/recharge sourceType: api - name: Recurly @@ -428,6 +453,12 @@ documentationUrl: https://docs.airbyte.io/integrations/sources/redshift icon: redshift.svg sourceType: database +- name: Retently + sourceDefinitionId: db04ecd1-42e7-4115-9cec-95812905c626 + dockerRepository: airbyte/source-retently + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/retently + sourceType: api - name: S3 sourceDefinitionId: 69589781-7828-43c5-9f63-8925b1c1ccc2 dockerRepository: airbyte/source-s3 @@ -443,7 +474,7 @@ - name: Salesforce sourceDefinitionId: b117307c-14b6-41aa-9422-947e34922962 dockerRepository: airbyte/source-salesforce - dockerImageTag: 0.1.2 + dockerImageTag: 0.1.3 documentationUrl: https://docs.airbyte.io/integrations/sources/salesforce icon: salesforce.svg sourceType: api @@ -457,7 +488,7 @@ - name: Shopify sourceDefinitionId: 9da77001-af33-4bcd-be46-6252bf9342b9 dockerRepository: airbyte/source-shopify - dockerImageTag: 0.1.21 + dockerImageTag: 0.1.22 documentationUrl: https://docs.airbyte.io/integrations/sources/shopify sourceType: api - name: Short.io @@ -505,14 +536,14 @@ - name: Stripe sourceDefinitionId: e094cb9a-26de-4645-8761-65c0c425d1de dockerRepository: airbyte/source-stripe - dockerImageTag: 0.1.21 + dockerImageTag: 0.1.22 documentationUrl: https://docs.airbyte.io/integrations/sources/stripe icon: stripe.svg sourceType: api - name: Survey Monkey sourceDefinitionId: badc5925-0485-42be-8caa-b34096cb71b5 dockerRepository: airbyte/source-surveymonkey - dockerImageTag: 0.1.0 + dockerImageTag: 0.1.3 documentationUrl: https://docs.airbyte.io/integrations/sources/surveymonkey sourceType: api - name: Tempo @@ -554,7 +585,7 @@ - name: Zendesk Chat sourceDefinitionId: 40d24d0f-b8f9-4fe0-9e6c-b06c0f3f45e4 dockerRepository: airbyte/source-zendesk-chat - dockerImageTag: 0.1.2 + dockerImageTag: 0.1.3 documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-chat icon: zendesk.svg sourceType: api @@ -567,7 +598,7 @@ - name: Zendesk Support sourceDefinitionId: 79c1aa37-dae3-42ae-b333-d1c105477715 dockerRepository: airbyte/source-zendesk-support - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.4 documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-support icon: zendesk.svg sourceType: api @@ -577,6 +608,11 @@ dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-talk sourceType: api +- sourceDefinitionId: cdaf146a-9b75-49fd-9dd2-9d64a0bb4781 + name: Sentry + dockerRepository: airbyte/source-sentry + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/sentry - name: Zoom sourceDefinitionId: aea2fd0d-377d-465e-86c0-4fdc4f688e51 dockerRepository: airbyte/source-zoom-singer diff --git a/docs/integrations/sources/zendesk-support.md b/docs/integrations/sources/zendesk-support.md index 598e77acb1321..69bcfba6c75c2 100644 --- a/docs/integrations/sources/zendesk-support.md +++ b/docs/integrations/sources/zendesk-support.md @@ -97,6 +97,7 @@ We recommend creating a restricted, read-only key specifically for Airbyte acces | Version | Date | Pull Request | Subject | | :------ | :-------- | :----- | :------ | +| `0.1.4` | 2021-10-26 | [7377](https://github.com/airbytehq/airbyte/pull/7377) | fix initially_assigned_at type in ticket metrics 
| | `0.1.3` | 2021-10-17 | [7097](https://github.com/airbytehq/airbyte/pull/7097) | correction of spec file | | `0.1.2` | 2021-10-16 | [6513](https://github.com/airbytehq/airbyte/pull/6513) | fixed comments stream | | `0.1.1` | 2021-09-02 | [5787](https://github.com/airbytehq/airbyte/pull/5787) | fixed incremental logic for the ticket_comments stream | From b9a20169828a4a0fdcaa23ed9a8a587f9f9733ae Mon Sep 17 00:00:00 2001 From: antixar Date: Mon, 8 Nov 2021 16:46:09 +0200 Subject: [PATCH 25/36] update json set value --- tools/bin/ci_credentials.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh index 37cd94ee1db77..7878c254f5c62 100755 --- a/tools/bin/ci_credentials.sh +++ b/tools/bin/ci_credentials.sh @@ -21,7 +21,7 @@ function write_standard_creds() { local source_name=${4:-github} [ -z "$connector_name" ] && error "Empty connector name" - [ -z "$creds" ] && error "!!!!!Creds not set for $connector_name" + [ -z "$creds" ] && error "!!!!!Creds not set for the connector $connector_name from ${source_name}" if [[ $CONNECTOR_NAME != "all" && ${connector_name} != ${CONNECTOR_NAME} ]]; then return 0 @@ -69,7 +69,7 @@ function write_all_secrets() { function export_github_secrets(){ - local pairs=`echo ${SECRETS_JSON} | jq -c 'keys[] as $k | {"name": $k, "value": .[$k]} | @base64'` + local pairs=`echo ${GITHUB_PROVIDED_SECRETS_JSON} | jq -c 'keys[] as $k | {"name": $k, "value": .[$k]} | @base64'` while read row; do pair=$(echo "${row}" | tr -d '"' | base64 -d) local key=$(echo ${pair} | jq -r .name) @@ -78,7 +78,7 @@ function export_github_secrets(){ declare -gxr "${key}"="$(echo ${value})" fi done <<< ${pairs} - unset SECRETS_JSON + unset GITHUB_PROVIDED_SECRETS_JSON } function export_gsm_secrets(){ From 72d2121bbc7c7ffee36bfbfd1d6e77e1a26a701b Mon Sep 17 00:00:00 2001 From: antixar Date: Wed, 10 Nov 2021 00:55:10 +0200 Subject: [PATCH 26/36] update spec file --- .github/workflows/publish-command.yml | 138 +++++++++++++++++- .github/workflows/test-command.yml | 138 +++++++++++++++++- .../acceptance-test-config.yml | 2 + .../acceptance-test-docker.sh | 2 +- .../source_linkedin_ads/source.py | 17 ++- .../source_linkedin_ads/spec.json | 103 +++++++++++-- 6 files changed, 381 insertions(+), 19 deletions(-) diff --git a/.github/workflows/publish-command.yml b/.github/workflows/publish-command.yml index bdffe002ae456..35b495d33b47f 100644 --- a/.github/workflows/publish-command.yml +++ b/.github/workflows/publish-command.yml @@ -70,10 +70,142 @@ jobs: - name: Install Pyenv run: python3 -m pip install virtualenv==16.7.9 --user - name: Write Integration Test Credentials # TODO DRY this with test-command.yml - run: ./tools/bin/ci_credentials.sh ${{ github.event.inputs.connector }} + run: ./tools/bin/ci_credentials.sh env: - SECRETS_JSON: ${{ toJson(secrets) }} - GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }} + AMAZON_SELLER_PARTNER_TEST_CREDS: ${{ secrets.AMAZON_SELLER_PARTNER_TEST_CREDS }} + AMAZON_ADS_TEST_CREDS: ${{ secrets.AMAZON_ADS_TEST_CREDS }} + AMPLITUDE_INTEGRATION_TEST_CREDS: ${{ secrets.AMPLITUDE_INTEGRATION_TEST_CREDS }} + AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }} + AWS_REDSHIFT_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_REDSHIFT_INTEGRATION_TEST_CREDS }} + AWS_ORACLE_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_ORACLE_INTEGRATION_TEST_CREDS }} + SOURCE_AWS_CLOUDTRAIL_CREDS: ${{ secrets.SOURCE_AWS_CLOUDTRAIL_CREDS }} + AZURE_STORAGE_INTEGRATION_TEST_CREDS: ${{ 
secrets.AZURE_STORAGE_INTEGRATION_TEST_CREDS }} + BIGQUERY_INTEGRATION_TEST_CREDS: ${{ secrets.BIGQUERY_INTEGRATION_TEST_CREDS }} + BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS: ${{ secrets.BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS }} + SOURCE_BING_ADS_CREDS: ${{ secrets.SOURCE_BING_ADS_CREDS }} + BIGQUERY_TEST_CREDS: ${{ secrets.BIGQUERY_TEST_CREDS }} + BRAINTREE_TEST_CREDS: ${{ secrets.BRAINTREE_TEST_CREDS }} + CART_TEST_CREDS: ${{ secrets.CART_TEST_CREDS }} + CHARGEBEE_INTEGRATION_TEST_CREDS: ${{ secrets.CHARGEBEE_INTEGRATION_TEST_CREDS }} + DESTINATION_POSTGRES_SSH_KEY_TEST_CREDS: ${{ secrets.DESTINATION_POSTGRES_SSH_KEY_TEST_CREDS }} + DESTINATION_POSTGRES_SSH_PWD_TEST_CREDS: ${{ secrets.DESTINATION_POSTGRES_SSH_PWD_TEST_CREDS }} + DESTINATION_PUBSUB_TEST_CREDS: ${{ secrets.DESTINATION_PUBSUB_TEST_CREDS }} + DESTINATION_KEEN_TEST_CREDS: ${{ secrets.DESTINATION_KEEN_TEST_CREDS }} + DESTINATION_KVDB_TEST_CREDS: ${{ secrets.DESTINATION_KVDB_TEST_CREDS }} + DRIFT_INTEGRATION_TEST_CREDS: ${{ secrets.DRIFT_INTEGRATION_TEST_CREDS }} + SOURCE_DIXA_TEST_CREDS: ${{ secrets.SOURCE_DIXA_TEST_CREDS }} + EXCHANGE_RATES_TEST_CREDS: ${{ secrets.EXCHANGE_RATES_TEST_CREDS }} + FACEBOOK_MARKETING_TEST_INTEGRATION_CREDS: ${{ secrets.FACEBOOK_MARKETING_TEST_INTEGRATION_CREDS }} + FACEBOOK_PAGES_INTEGRATION_TEST_CREDS: ${{ secrets.FACEBOOK_PAGES_INTEGRATION_TEST_CREDS }} + FILE_SECURE_HTTPS_TEST_CREDS: ${{ secrets.FILE_SECURE_HTTPS_TEST_CREDS }} + FRESHDESK_TEST_CREDS: ${{ secrets.FRESHDESK_TEST_CREDS }} + GITLAB_INTEGRATION_TEST_CREDS: ${{ secrets.GITLAB_INTEGRATION_TEST_CREDS }} + GH_NATIVE_INTEGRATION_TEST_CREDS: ${{ secrets.GH_NATIVE_INTEGRATION_TEST_CREDS }} + GOOGLE_ADS_TEST_CREDS: ${{ secrets.GOOGLE_ADS_TEST_CREDS }} + GOOGLE_ANALYTICS_V4_TEST_CREDS: ${{ secrets.GOOGLE_ANALYTICS_V4_TEST_CREDS }} + GOOGLE_ANALYTICS_V4_TEST_CREDS_SRV_ACC: ${{ secrets.GOOGLE_ANALYTICS_V4_TEST_CREDS_SRV_ACC }} + GOOGLE_ANALYTICS_V4_TEST_CREDS_OLD: ${{ secrets.GOOGLE_ANALYTICS_V4_TEST_CREDS_OLD }} + GOOGLE_CLOUD_STORAGE_TEST_CREDS: ${{ secrets.GOOGLE_CLOUD_STORAGE_TEST_CREDS }} + GOOGLE_DIRECTORY_TEST_CREDS: ${{ secrets.GOOGLE_DIRECTORY_TEST_CREDS }} + GOOGLE_DIRECTORY_TEST_CREDS_OAUTH: ${{ secrets.GOOGLE_DIRECTORY_TEST_CREDS_OAUTH }} + GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS: ${{ secrets.GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS }} + GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS_SRV_ACC: ${{ secrets.GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS_SRV_ACC }} + GOOGLE_SHEETS_TESTS_CREDS: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS }} + GOOGLE_SHEETS_TESTS_CREDS_SRV_ACC: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS_SRV_ACC }} + GOOGLE_SHEETS_TESTS_CREDS_OLD: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS_OLD }} + GOOGLE_WORKSPACE_ADMIN_REPORTS_TEST_CREDS: ${{ secrets.GOOGLE_WORKSPACE_ADMIN_REPORTS_TEST_CREDS }} + GREENHOUSE_TEST_CREDS: ${{ secrets.GREENHOUSE_TEST_CREDS }} + GREENHOUSE_TEST_CREDS_LIMITED: ${{ secrets.GREENHOUSE_TEST_CREDS_LIMITED }} + HARVEST_INTEGRATION_TESTS_CREDS: ${{ secrets.HARVEST_INTEGRATION_TESTS_CREDS }} + HUBSPOT_INTEGRATION_TESTS_CREDS: ${{ secrets.HUBSPOT_INTEGRATION_TESTS_CREDS }} + HUBSPOT_INTEGRATION_TESTS_CREDS_OAUTH: ${{ secrets.HUBSPOT_INTEGRATION_TESTS_CREDS_OAUTH }} + INSTAGRAM_INTEGRATION_TESTS_CREDS: ${{ secrets.INSTAGRAM_INTEGRATION_TESTS_CREDS }} + INTERCOM_INTEGRATION_TEST_CREDS: ${{ secrets.INTERCOM_INTEGRATION_TEST_CREDS }} + INTERCOM_INTEGRATION_OAUTH_TEST_CREDS: ${{ secrets.INTERCOM_INTEGRATION_OAUTH_TEST_CREDS }} + ITERABLE_INTEGRATION_TEST_CREDS: ${{ secrets.ITERABLE_INTEGRATION_TEST_CREDS }} + JIRA_INTEGRATION_TEST_CREDS: 
${{ secrets.JIRA_INTEGRATION_TEST_CREDS }} + KLAVIYO_TEST_CREDS: ${{ secrets.KLAVIYO_TEST_CREDS }} + LEVER_HIRING_INTEGRATION_TEST_CREDS: ${{ secrets.LEVER_HIRING_INTEGRATION_TEST_CREDS }} + LOOKER_INTEGRATION_TEST_CREDS: ${{ secrets.LOOKER_INTEGRATION_TEST_CREDS }} + MAILCHIMP_TEST_CREDS: ${{ secrets.MAILCHIMP_TEST_CREDS }} + MICROSOFT_TEAMS_TEST_CREDS: ${{ secrets.MICROSOFT_TEAMS_TEST_CREDS }} + MIXPANEL_INTEGRATION_TEST_CREDS: ${{ secrets.MIXPANEL_INTEGRATION_TEST_CREDS }} + MSSQL_RDS_TEST_CREDS: ${{ secrets.MSSQL_RDS_TEST_CREDS }} + PAYPAL_TRANSACTION_CREDS: ${{ secrets.SOURCE_PAYPAL_TRANSACTION_CREDS }} + PINTEREST_TEST_CREDS: ${{ secrets.PINTEREST_TEST_CREDS }} + POSTGRES_SSH_KEY_TEST_CREDS: ${{ secrets.POSTGRES_SSH_KEY_TEST_CREDS }} + POSTGRES_SSH_PWD_TEST_CREDS: ${{ secrets.POSTGRES_SSH_PWD_TEST_CREDS }} + MYSQL_SSH_KEY_TEST_CREDS: ${{ secrets.MYSQL_SSH_KEY_TEST_CREDS }} + MYSQL_SSH_PWD_TEST_CREDS: ${{ secrets.MYSQL_SSH_PWD_TEST_CREDS }} + POSTHOG_TEST_CREDS: ${{ secrets.POSTHOG_TEST_CREDS }} + PIPEDRIVE_INTEGRATION_TESTS_CREDS: ${{ secrets.PIPEDRIVE_INTEGRATION_TESTS_CREDS }} + PIPEDRIVE_INTEGRATION_TESTS_CREDS_OAUTH: ${{ secrets.PIPEDRIVE_INTEGRATION_TESTS_CREDS_OAUTH }} + PIPEDRIVE_INTEGRATION_TESTS_CREDS_OLD: ${{ secrets.PIPEDRIVE_INTEGRATION_TESTS_CREDS_OLD }} + RECHARGE_INTEGRATION_TEST_CREDS: ${{ secrets.RECHARGE_INTEGRATION_TEST_CREDS }} + QUICKBOOKS_TEST_CREDS: ${{ secrets.QUICKBOOKS_TEST_CREDS }} + SALESFORCE_BULK_INTEGRATION_TESTS_CREDS: ${{ secrets.SALESFORCE_BULK_INTEGRATION_TESTS_CREDS }} + SALESFORCE_INTEGRATION_TESTS_CREDS: ${{ secrets.SALESFORCE_INTEGRATION_TESTS_CREDS }} + SENDGRID_INTEGRATION_TEST_CREDS: ${{ secrets.SENDGRID_INTEGRATION_TEST_CREDS }} + SHOPIFY_INTEGRATION_TEST_CREDS: ${{ secrets.SHOPIFY_INTEGRATION_TEST_CREDS }} + SHOPIFY_INTEGRATION_TEST_OAUTH_CREDS: ${{ secrets.SHOPIFY_INTEGRATION_TEST_OAUTH_CREDS }} + SOURCE_ASANA_TEST_CREDS: ${{ secrets.SOURCE_ASANA_TEST_CREDS }} + SOURCE_OKTA_TEST_CREDS: ${{ secrets.SOURCE_OKTA_TEST_CREDS }} + SOURCE_SLACK_TEST_CREDS: ${{ secrets.SOURCE_SLACK_TEST_CREDS }} + SOURCE_SLACK_OAUTH_TEST_CREDS: ${{ secrets.SOURCE_SLACK_OAUTH_TEST_CREDS }} + SOURCE_US_CENSUS_TEST_CREDS: ${{ secrets.SOURCE_US_CENSUS_TEST_CREDS }} + SMARTSHEETS_TEST_CREDS: ${{ secrets.SMARTSHEETS_TEST_CREDS }} + SOURCE_SNAPCHAT_MARKETING_CREDS: ${{ secrets.SOURCE_SNAPCHAT_MARKETING_CREDS }} + SNOWFLAKE_INTEGRATION_TEST_CREDS: ${{ secrets.SNOWFLAKE_INTEGRATION_TEST_CREDS }} + SNOWFLAKE_S3_COPY_INTEGRATION_TEST_CREDS: ${{ secrets.SNOWFLAKE_S3_COPY_INTEGRATION_TEST_CREDS }} + SNOWFLAKE_GCS_COPY_INTEGRATION_TEST_CREDS: ${{ secrets.SNOWFLAKE_GCS_COPY_INTEGRATION_TEST_CREDS }} + SOURCE_SQUARE_CREDS: ${{ secrets.SOURCE_SQUARE_CREDS }} + SOURCE_MARKETO_TEST_CREDS: ${{ secrets.SOURCE_MARKETO_TEST_CREDS }} + SOURCE_RECURLY_INTEGRATION_TEST_CREDS: ${{ secrets.SOURCE_RECURLY_INTEGRATION_TEST_CREDS }} + SOURCE_S3_TEST_CREDS: ${{ secrets.SOURCE_S3_TEST_CREDS }} + SOURCE_S3_PARQUET_CREDS: ${{ secrets.SOURCE_S3_PARQUET_CREDS }} + SOURCE_SHORTIO_TEST_CREDS: ${{ secrets.SOURCE_SHORTIO_TEST_CREDS }} + SOURCE_STRIPE_CREDS: ${{ secrets.SOURCE_STRIPE_CREDS }} + STRIPE_INTEGRATION_CONNECTED_ACCOUNT_TEST_CREDS: ${{ secrets.STRIPE_INTEGRATION_CONNECTED_ACCOUNT_TEST_CREDS }} + SURVEYMONKEY_TEST_CREDS: ${{ secrets.SURVEYMONKEY_TEST_CREDS }} + TEMPO_INTEGRATION_TEST_CREDS: ${{ secrets.TEMPO_INTEGRATION_TEST_CREDS }} + TRELLO_TEST_CREDS: ${{ secrets.TRELLO_TEST_CREDS }} + TWILIO_TEST_CREDS: ${{ secrets.TWILIO_TEST_CREDS }} + SOURCE_TYPEFORM_CREDS: ${{ 
secrets.SOURCE_TYPEFORM_CREDS }} + ZENDESK_CHAT_INTEGRATION_TEST_CREDS: ${{ secrets.ZENDESK_CHAT_INTEGRATION_TEST_CREDS }} + ZENDESK_SUNSHINE_TEST_CREDS: ${{ secrets.ZENDESK_SUNSHINE_TEST_CREDS }} + ZENDESK_TALK_TEST_CREDS: ${{ secrets.ZENDESK_TALK_TEST_CREDS }} + ZENDESK_SUPPORT_TEST_CREDS: ${{ secrets.ZENDESK_SUPPORT_TEST_CREDS }} + ZENDESK_SUPPORT_OAUTH_TEST_CREDS: ${{ secrets.ZENDESK_SUPPORT_OAUTH_TEST_CREDS }} + ZOOM_INTEGRATION_TEST_CREDS: ${{ secrets.ZOOM_INTEGRATION_TEST_CREDS }} + PLAID_INTEGRATION_TEST_CREDS: ${{ secrets.PLAID_INTEGRATION_TEST_CREDS }} + DESTINATION_S3_INTEGRATION_TEST_CREDS: ${{ secrets.DESTINATION_S3_INTEGRATION_TEST_CREDS }} + DESTINATION_AZURE_BLOB_CREDS: ${{ secrets.DESTINATION_AZURE_BLOB_CREDS }} + DESTINATION_GCS_CREDS: ${{ secrets.DESTINATION_GCS_CREDS }} + APIFY_INTEGRATION_TEST_CREDS: ${{ secrets.APIFY_INTEGRATION_TEST_CREDS }} + DESTINATION_DYNAMODB_TEST_CREDS: ${{ secrets.DESTINATION_DYNAMODB_TEST_CREDS }} + SOURCE_ZUORA_TEST_CREDS: ${{ secrets.SOURCE_ZUORA_TEST_CREDS }} + SOURCE_CLOSE_COM_CREDS: ${{ secrets.SOURCE_CLOSE_COM_CREDS }} + SOURCE_BAMBOO_HR_CREDS: ${{ secrets.SOURCE_BAMBOO_HR_CREDS }} + SOURCE_LINKEDIN_ADS_TEST_CREDS: ${{ secrets.SOURCE_LINKEDIN_ADS_TEST_CREDS }} + SOURCE_BIGCOMMERCE_CREDS: ${{ secrets.SOURCE_BIGCOMMERCE_CREDS }} + SOURCE_TIKTOK_MARKETING_TEST_CREDS: ${{ secrets.SOURCE_TIKTOK_MARKETING_TEST_CREDS }} + SOURCE_TIKTOK_MARKETING_PROD_TEST_CREDS: ${{ secrets.SOURCE_TIKTOK_MARKETING_PROD_TEST_CREDS }} + DESTINATION_DATABRICKS_CREDS: ${{ secrets.DESTINATION_DATABRICKS_CREDS }} + MONGODB_TEST_CREDS: ${{ secrets.MONGODB_TEST_CREDS }} + SOURCE_ONESIGNAL_TEST_CREDS: ${{ secrets.SOURCE_ONESIGNAL_TEST_CREDS }} + SOURCE_SALESLOFT_TEST_CREDS: ${{ secrets.SOURCE_SALESLOFT_TEST_CREDS }} + SOURCE_CONFLUENCE_TEST_CREDS: ${{ secrets.SOURCE_CONFLUENCE_TEST_CREDS }} + SOURCE_AMAZON_SQS_TEST_CREDS: ${{ secrets.SOURCE_AMAZON_SQS_TEST_CREDS }} + SOURCE_FRESHSERVICE_TEST_CREDS: ${{ secrets.SOURCE_FRESHSERVICE_TEST_CREDS }} + SOURCE_LEMLIST_TEST_CREDS: ${{ secrets.SOURCE_LEMLIST_TEST_CREDS }} + SOURCE_STRAVA_TEST_CREDS: ${{ secrets.SOURCE_STRAVA_TEST_CREDS }} + SOURCE_PAYSTACK_TEST_CREDS: ${{ secrets.SOURCE_PAYSTACK_TEST_CREDS }} + SOURCE_DELIGHTED_TEST_CREDS: ${{ secrets.SOURCE_DELIGHTED_TEST_CREDS }} + SOURCE_RETENTLY_TEST_CREDS: ${{ secrets.SOURCE_RETENTLY_TEST_CREDS }} + SOURCE_SENTRY_TEST_CREDS: ${{ secrets.SOURCE_SENTRY_TEST_CREDS }} + SOURCE_FRESHSALES_TEST_CREDS: ${{ secrets.SOURCE_FRESHSALES_TEST_CREDS }} + SOURCE_MONDAY_TEST_CREDS: ${{ secrets.SOURCE_MONDAY_TEST_CREDS }} + SOURCE_COMMERCETOOLS_TEST_CREDS: ${{ secrets.SOURCE_COMMERCETOOLS_TEST_CREDS }} - run: | echo "$SPEC_CACHE_SERVICE_ACCOUNT_KEY" > spec_cache_key_file.json && docker login -u airbytebot -p ${DOCKER_PASSWORD} ./tools/integrations/manage.sh publish airbyte-integrations/${{ github.event.inputs.connector }} ${{ github.event.inputs.run-tests }} --publish_spec_to_cache diff --git a/.github/workflows/test-command.yml b/.github/workflows/test-command.yml index 1b05f0e974c57..976b2fdfc9e75 100644 --- a/.github/workflows/test-command.yml +++ b/.github/workflows/test-command.yml @@ -65,10 +65,142 @@ jobs: - name: Install Pyenv run: python3 -m pip install virtualenv==16.7.9 --user - name: Write Integration Test Credentials - run: ./tools/bin/ci_credentials.sh ${{ github.event.inputs.connector }} + run: ./tools/bin/ci_credentials.sh env: - SECRETS_JSON: ${{ toJson(secrets) }} - GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }} + AMAZON_SELLER_PARTNER_TEST_CREDS: ${{ 
secrets.AMAZON_SELLER_PARTNER_TEST_CREDS }} + AMAZON_ADS_TEST_CREDS: ${{ secrets.AMAZON_ADS_TEST_CREDS }} + AMPLITUDE_INTEGRATION_TEST_CREDS: ${{ secrets.AMPLITUDE_INTEGRATION_TEST_CREDS }} + AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }} + AWS_ORACLE_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_ORACLE_INTEGRATION_TEST_CREDS }} + SOURCE_AWS_CLOUDTRAIL_CREDS: ${{ secrets.SOURCE_AWS_CLOUDTRAIL_CREDS }} + AWS_REDSHIFT_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_REDSHIFT_INTEGRATION_TEST_CREDS }} + AZURE_STORAGE_INTEGRATION_TEST_CREDS: ${{ secrets.AZURE_STORAGE_INTEGRATION_TEST_CREDS }} + BIGQUERY_INTEGRATION_TEST_CREDS: ${{ secrets.BIGQUERY_INTEGRATION_TEST_CREDS }} + BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS: ${{ secrets.BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS }} + SOURCE_BING_ADS_CREDS: ${{ secrets.SOURCE_BING_ADS_CREDS }} + BIGQUERY_TEST_CREDS: ${{ secrets.BIGQUERY_TEST_CREDS }} + BRAINTREE_TEST_CREDS: ${{ secrets.BRAINTREE_TEST_CREDS }} + CART_TEST_CREDS: ${{ secrets.CART_TEST_CREDS }} + CHARGEBEE_INTEGRATION_TEST_CREDS: ${{ secrets.CHARGEBEE_INTEGRATION_TEST_CREDS }} + DESTINATION_POSTGRES_SSH_KEY_TEST_CREDS: ${{ secrets.DESTINATION_POSTGRES_SSH_KEY_TEST_CREDS }} + DESTINATION_POSTGRES_SSH_PWD_TEST_CREDS: ${{ secrets.DESTINATION_POSTGRES_SSH_PWD_TEST_CREDS }} + DESTINATION_PUBSUB_TEST_CREDS: ${{ secrets.DESTINATION_PUBSUB_TEST_CREDS }} + DESTINATION_KEEN_TEST_CREDS: ${{ secrets.DESTINATION_KEEN_TEST_CREDS }} + DESTINATION_KVDB_TEST_CREDS: ${{ secrets.DESTINATION_KVDB_TEST_CREDS }} + DRIFT_INTEGRATION_TEST_CREDS: ${{ secrets.DRIFT_INTEGRATION_TEST_CREDS }} + SOURCE_DIXA_TEST_CREDS: ${{ secrets.SOURCE_DIXA_TEST_CREDS }} + EXCHANGE_RATES_TEST_CREDS: ${{ secrets.EXCHANGE_RATES_TEST_CREDS }} + FACEBOOK_MARKETING_TEST_INTEGRATION_CREDS: ${{ secrets.FACEBOOK_MARKETING_TEST_INTEGRATION_CREDS }} + FACEBOOK_PAGES_INTEGRATION_TEST_CREDS: ${{ secrets.FACEBOOK_PAGES_INTEGRATION_TEST_CREDS }} + FILE_SECURE_HTTPS_TEST_CREDS: ${{ secrets.FILE_SECURE_HTTPS_TEST_CREDS }} + FRESHDESK_TEST_CREDS: ${{ secrets.FRESHDESK_TEST_CREDS }} + GITLAB_INTEGRATION_TEST_CREDS: ${{ secrets.GITLAB_INTEGRATION_TEST_CREDS }} + GH_NATIVE_INTEGRATION_TEST_CREDS: ${{ secrets.GH_NATIVE_INTEGRATION_TEST_CREDS }} + GOOGLE_ADS_TEST_CREDS: ${{ secrets.GOOGLE_ADS_TEST_CREDS }} + GOOGLE_ANALYTICS_V4_TEST_CREDS: ${{ secrets.GOOGLE_ANALYTICS_V4_TEST_CREDS }} + GOOGLE_ANALYTICS_V4_TEST_CREDS_SRV_ACC: ${{ secrets.GOOGLE_ANALYTICS_V4_TEST_CREDS_SRV_ACC }} + GOOGLE_ANALYTICS_V4_TEST_CREDS_OLD: ${{ secrets.GOOGLE_ANALYTICS_V4_TEST_CREDS_OLD }} + GOOGLE_CLOUD_STORAGE_TEST_CREDS: ${{ secrets.GOOGLE_CLOUD_STORAGE_TEST_CREDS }} + GOOGLE_DIRECTORY_TEST_CREDS: ${{ secrets.GOOGLE_DIRECTORY_TEST_CREDS }} + GOOGLE_DIRECTORY_TEST_CREDS_OAUTH: ${{ secrets.GOOGLE_DIRECTORY_TEST_CREDS_OAUTH }} + GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS: ${{ secrets.GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS }} + GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS_SRV_ACC: ${{ secrets.GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS_SRV_ACC }} + GOOGLE_SHEETS_TESTS_CREDS: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS }} + GOOGLE_SHEETS_TESTS_CREDS_SRV_ACC: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS_SRV_ACC }} + GOOGLE_SHEETS_TESTS_CREDS_OLD: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS_OLD }} + GOOGLE_WORKSPACE_ADMIN_REPORTS_TEST_CREDS: ${{ secrets.GOOGLE_WORKSPACE_ADMIN_REPORTS_TEST_CREDS }} + GREENHOUSE_TEST_CREDS: ${{ secrets.GREENHOUSE_TEST_CREDS }} + GREENHOUSE_TEST_CREDS_LIMITED: ${{ secrets.GREENHOUSE_TEST_CREDS_LIMITED }} + HARVEST_INTEGRATION_TESTS_CREDS: ${{ 
secrets.HARVEST_INTEGRATION_TESTS_CREDS }} + HUBSPOT_INTEGRATION_TESTS_CREDS: ${{ secrets.HUBSPOT_INTEGRATION_TESTS_CREDS }} + HUBSPOT_INTEGRATION_TESTS_CREDS_OAUTH: ${{ secrets.HUBSPOT_INTEGRATION_TESTS_CREDS_OAUTH }} + INSTAGRAM_INTEGRATION_TESTS_CREDS: ${{ secrets.INSTAGRAM_INTEGRATION_TESTS_CREDS }} + INTERCOM_INTEGRATION_TEST_CREDS: ${{ secrets.INTERCOM_INTEGRATION_TEST_CREDS }} + INTERCOM_INTEGRATION_OAUTH_TEST_CREDS: ${{ secrets.INTERCOM_INTEGRATION_OAUTH_TEST_CREDS }} + ITERABLE_INTEGRATION_TEST_CREDS: ${{ secrets.ITERABLE_INTEGRATION_TEST_CREDS }} + JIRA_INTEGRATION_TEST_CREDS: ${{ secrets.JIRA_INTEGRATION_TEST_CREDS }} + KLAVIYO_TEST_CREDS: ${{ secrets.KLAVIYO_TEST_CREDS }} + SOURCE_ASANA_TEST_CREDS: ${{ secrets.SOURCE_ASANA_TEST_CREDS }} + LEVER_HIRING_INTEGRATION_TEST_CREDS: ${{ secrets.LEVER_HIRING_INTEGRATION_TEST_CREDS }} + LOOKER_INTEGRATION_TEST_CREDS: ${{ secrets.LOOKER_INTEGRATION_TEST_CREDS }} + MAILCHIMP_TEST_CREDS: ${{ secrets.MAILCHIMP_TEST_CREDS }} + MICROSOFT_TEAMS_TEST_CREDS: ${{ secrets.MICROSOFT_TEAMS_TEST_CREDS }} + MIXPANEL_INTEGRATION_TEST_CREDS: ${{ secrets.MIXPANEL_INTEGRATION_TEST_CREDS }} + MSSQL_RDS_TEST_CREDS: ${{ secrets.MSSQL_RDS_TEST_CREDS }} + PAYPAL_TRANSACTION_CREDS: ${{ secrets.SOURCE_PAYPAL_TRANSACTION_CREDS }} + PINTEREST_TEST_CREDS: ${{ secrets.PINTEREST_TEST_CREDS }} + POSTGRES_SSH_KEY_TEST_CREDS: ${{ secrets.POSTGRES_SSH_KEY_TEST_CREDS }} + POSTGRES_SSH_PWD_TEST_CREDS: ${{ secrets.POSTGRES_SSH_PWD_TEST_CREDS }} + MYSQL_SSH_KEY_TEST_CREDS: ${{ secrets.MYSQL_SSH_KEY_TEST_CREDS }} + MYSQL_SSH_PWD_TEST_CREDS: ${{ secrets.MYSQL_SSH_PWD_TEST_CREDS }} + POSTHOG_TEST_CREDS: ${{ secrets.POSTHOG_TEST_CREDS }} + PIPEDRIVE_INTEGRATION_TESTS_CREDS: ${{ secrets.PIPEDRIVE_INTEGRATION_TESTS_CREDS }} + PIPEDRIVE_INTEGRATION_TESTS_CREDS_OAUTH: ${{ secrets.PIPEDRIVE_INTEGRATION_TESTS_CREDS_OAUTH }} + PIPEDRIVE_INTEGRATION_TESTS_CREDS_OLD: ${{ secrets.PIPEDRIVE_INTEGRATION_TESTS_CREDS_OLD }} + RECHARGE_INTEGRATION_TEST_CREDS: ${{ secrets.RECHARGE_INTEGRATION_TEST_CREDS }} + QUICKBOOKS_TEST_CREDS: ${{ secrets.QUICKBOOKS_TEST_CREDS }} + SALESFORCE_BULK_INTEGRATION_TESTS_CREDS: ${{ secrets.SALESFORCE_BULK_INTEGRATION_TESTS_CREDS }} + SALESFORCE_INTEGRATION_TESTS_CREDS: ${{ secrets.SALESFORCE_INTEGRATION_TESTS_CREDS }} + SENDGRID_INTEGRATION_TEST_CREDS: ${{ secrets.SENDGRID_INTEGRATION_TEST_CREDS }} + SHOPIFY_INTEGRATION_TEST_CREDS: ${{ secrets.SHOPIFY_INTEGRATION_TEST_CREDS }} + SHOPIFY_INTEGRATION_TEST_OAUTH_CREDS: ${{ secrets.SHOPIFY_INTEGRATION_TEST_OAUTH_CREDS }} + SOURCE_OKTA_TEST_CREDS: ${{ secrets.SOURCE_OKTA_TEST_CREDS }} + SOURCE_SLACK_TEST_CREDS: ${{ secrets.SOURCE_SLACK_TEST_CREDS }} + SOURCE_SLACK_OAUTH_TEST_CREDS: ${{ secrets.SOURCE_SLACK_OAUTH_TEST_CREDS }} + SOURCE_US_CENSUS_TEST_CREDS: ${{ secrets.SOURCE_US_CENSUS_TEST_CREDS }} + SMARTSHEETS_TEST_CREDS: ${{ secrets.SMARTSHEETS_TEST_CREDS }} + SOURCE_SNAPCHAT_MARKETING_CREDS: ${{ secrets.SOURCE_SNAPCHAT_MARKETING_CREDS }} + SNOWFLAKE_INTEGRATION_TEST_CREDS: ${{ secrets.SNOWFLAKE_INTEGRATION_TEST_CREDS }} + SNOWFLAKE_S3_COPY_INTEGRATION_TEST_CREDS: ${{ secrets.SNOWFLAKE_S3_COPY_INTEGRATION_TEST_CREDS }} + SNOWFLAKE_GCS_COPY_INTEGRATION_TEST_CREDS: ${{ secrets.SNOWFLAKE_GCS_COPY_INTEGRATION_TEST_CREDS }} + SOURCE_SQUARE_CREDS: ${{ secrets.SOURCE_SQUARE_CREDS }} + SOURCE_MARKETO_TEST_CREDS: ${{ secrets.SOURCE_MARKETO_TEST_CREDS }} + SOURCE_RECURLY_INTEGRATION_TEST_CREDS: ${{ secrets.SOURCE_RECURLY_INTEGRATION_TEST_CREDS }} + SOURCE_S3_TEST_CREDS: ${{ secrets.SOURCE_S3_TEST_CREDS }} + 
SOURCE_S3_PARQUET_CREDS: ${{ secrets.SOURCE_S3_PARQUET_CREDS }} + SOURCE_SHORTIO_TEST_CREDS: ${{ secrets.SOURCE_SHORTIO_TEST_CREDS }} + SOURCE_STRIPE_CREDS: ${{ secrets.SOURCE_STRIPE_CREDS }} + STRIPE_INTEGRATION_CONNECTED_ACCOUNT_TEST_CREDS: ${{ secrets.STRIPE_INTEGRATION_CONNECTED_ACCOUNT_TEST_CREDS }} + SURVEYMONKEY_TEST_CREDS: ${{ secrets.SURVEYMONKEY_TEST_CREDS }} + TEMPO_INTEGRATION_TEST_CREDS: ${{ secrets.TEMPO_INTEGRATION_TEST_CREDS }} + TRELLO_TEST_CREDS: ${{ secrets.TRELLO_TEST_CREDS }} + TWILIO_TEST_CREDS: ${{ secrets.TWILIO_TEST_CREDS }} + SOURCE_TYPEFORM_CREDS: ${{ secrets.SOURCE_TYPEFORM_CREDS }} + ZENDESK_CHAT_INTEGRATION_TEST_CREDS: ${{ secrets.ZENDESK_CHAT_INTEGRATION_TEST_CREDS }} + ZENDESK_SUNSHINE_TEST_CREDS: ${{ secrets.ZENDESK_SUNSHINE_TEST_CREDS }} + ZENDESK_TALK_TEST_CREDS: ${{ secrets.ZENDESK_TALK_TEST_CREDS }} + ZENDESK_SUPPORT_TEST_CREDS: ${{ secrets.ZENDESK_SUPPORT_TEST_CREDS }} + ZENDESK_SUPPORT_OAUTH_TEST_CREDS: ${{ secrets.ZENDESK_SUPPORT_OAUTH_TEST_CREDS }} + ZOOM_INTEGRATION_TEST_CREDS: ${{ secrets.ZOOM_INTEGRATION_TEST_CREDS }} + PLAID_INTEGRATION_TEST_CREDS: ${{ secrets.PLAID_INTEGRATION_TEST_CREDS }} + DESTINATION_S3_INTEGRATION_TEST_CREDS: ${{ secrets.DESTINATION_S3_INTEGRATION_TEST_CREDS }} + DESTINATION_AZURE_BLOB_CREDS: ${{ secrets.DESTINATION_AZURE_BLOB_CREDS }} + DESTINATION_GCS_CREDS: ${{ secrets.DESTINATION_GCS_CREDS }} + DESTINATION_DYNAMODB_TEST_CREDS: ${{ secrets.DESTINATION_DYNAMODB_TEST_CREDS }} + APIFY_INTEGRATION_TEST_CREDS: ${{ secrets.APIFY_INTEGRATION_TEST_CREDS }} + SOURCE_ZUORA_TEST_CREDS: ${{ secrets.SOURCE_ZUORA_TEST_CREDS }} + SOURCE_CLOSE_COM_CREDS: ${{ secrets.SOURCE_CLOSE_COM_CREDS }} + SOURCE_BAMBOO_HR_CREDS: ${{ secrets.SOURCE_BAMBOO_HR_CREDS }} + SOURCE_LINKEDIN_ADS_TEST_CREDS: ${{ secrets.SOURCE_LINKEDIN_ADS_TEST_CREDS }} + SOURCE_BIGCOMMERCE_CREDS: ${{ secrets.SOURCE_BIGCOMMERCE_CREDS }} + SOURCE_TIKTOK_MARKETING_TEST_CREDS: ${{ secrets.SOURCE_TIKTOK_MARKETING_TEST_CREDS }} + SOURCE_TIKTOK_MARKETING_PROD_TEST_CREDS: ${{ secrets.SOURCE_TIKTOK_MARKETING_PROD_TEST_CREDS }} + DESTINATION_DATABRICKS_CREDS: ${{ secrets.DESTINATION_DATABRICKS_CREDS }} + MONGODB_TEST_CREDS: ${{ secrets.MONGODB_TEST_CREDS }} + SOURCE_ONESIGNAL_TEST_CREDS: ${{ secrets.SOURCE_ONESIGNAL_TEST_CREDS }} + SOURCE_SALESLOFT_TEST_CREDS: ${{ secrets.SOURCE_SALESLOFT_TEST_CREDS }} + SOURCE_CONFLUENCE_TEST_CREDS: ${{ secrets.SOURCE_CONFLUENCE_TEST_CREDS }} + SOURCE_AMAZON_SQS_TEST_CREDS: ${{ secrets.SOURCE_AMAZON_SQS_TEST_CREDS }} + SOURCE_FRESHSERVICE_TEST_CREDS: ${{ secrets.SOURCE_FRESHSERVICE_TEST_CREDS }} + SOURCE_LEMLIST_TEST_CREDS: ${{ secrets.SOURCE_LEMLIST_TEST_CREDS }} + SOURCE_STRAVA_TEST_CREDS: ${{ secrets.SOURCE_STRAVA_TEST_CREDS }} + SOURCE_PAYSTACK_TEST_CREDS: ${{ secrets.SOURCE_PAYSTACK_TEST_CREDS }} + SOURCE_DELIGHTED_TEST_CREDS: ${{ secrets.SOURCE_DELIGHTED_TEST_CREDS }} + SOURCE_RETENTLY_TEST_CREDS: ${{ secrets.SOURCE_RETENTLY_TEST_CREDS }} + SOURCE_SENTRY_TEST_CREDS: ${{ secrets.SOURCE_SENTRY_TEST_CREDS }} + SOURCE_FRESHSALES_TEST_CREDS: ${{ secrets.SOURCE_FRESHSALES_TEST_CREDS }} + SOURCE_MONDAY_TEST_CREDS: ${{ secrets.SOURCE_MONDAY_TEST_CREDS }} + SOURCE_COMMERCETOOLS_TEST_CREDS: ${{ secrets.SOURCE_COMMERCETOOLS_TEST_CREDS }} - run: | ./tools/bin/ci_integration_test.sh ${{ github.event.inputs.connector }} name: test ${{ github.event.inputs.connector }} diff --git a/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-config.yml b/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-config.yml index 
d584e7af575bf..d65e7ff230364 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-config.yml @@ -7,6 +7,8 @@ tests: connection: - config_path: "secrets/config.json" status: "succeed" + - config_path: "secrets/config_token.json" + status: "succeed" - config_path: "integration_tests/invalid_config.json" status: "failed" discovery: diff --git a/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-docker.sh index e4d8b1cef8961..c51577d10690c 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-docker.sh +++ b/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-docker.sh @@ -1,7 +1,7 @@ #!/usr/bin/env sh # Build latest connector image -docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2) +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) # Pull latest acctest image docker pull airbyte/source-acceptance-test:latest diff --git a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py index f5ad4b9efe477..884de1702b206 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py +++ b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py @@ -303,6 +303,19 @@ class SourceLinkedinAds(AbstractSource): - implementation to call each stream with it's input parameters. """ + @classmethod + def get_authenticator(cls, config: Mapping[str, Any]) -> TokenAuthenticator: + """Validate input parameters and generate the necessary authenticator object""" + auth_method = config.get("credentials", {}).get("credentials") + if not auth_method or auth_method == "access_token": + # support backward compatibility with old existing configs + access_token = config["credentials"]["access_token"] if auth_method else config["access_token"] + return TokenAuthenticator(token=access_token) + elif auth_method == "oAuth2.0": + raise NotImplementedError("OAuth2.0 authentication is not implemented yet") + + raise Exception("Incorrect input parameters: unsupported authentication method") + def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, any]: """ Testing connection availability for the connector. @@ -310,7 +323,7 @@ def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> :: more info: https://docs.microsoft.com/linkedin/consumer/integrations/self-serve/sign-in-with-linkedin """ - header = TokenAuthenticator(token=config["access_token"]).get_auth_header() + header = self.get_authenticator(config).get_auth_header() profile_url = "https://api.linkedin.com/v2/me" try: @@ -326,7 +339,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: Passing config to the streams. 
""" - config["authenticator"] = TokenAuthenticator(token=config["access_token"]) + config["authenticator"] = self.get_authenticator(config) return [ Accounts(config), diff --git a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json index 491664b45a1c7..c5057c6b6f5d4 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json +++ b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json @@ -4,21 +4,19 @@ "$schema": "http://json-schema.org/draft-07/schema#", "title": "Linkedin Ads Spec", "type": "object", - "required": ["start_date", "access_token"], - "additionalProperties": false, + "required": [ + "start_date" + ], + "additionalProperties": true, "properties": { "start_date": { "type": "string", "title": "Start Date", "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$", "description": "Date in the format 2020-09-17. Any data before this date will not be replicated.", - "examples": ["2021-05-17"] - }, - "access_token": { - "type": "string", - "title": "Access Token", - "description": "The token value ganerated using Auth Code", - "airbyte_secret": true + "examples": [ + "2021-05-17" + ] }, "account_ids": { "title": "Account IDs", @@ -28,7 +26,92 @@ "type": "integer" }, "default": [] + }, + "credentials": { + "title": "Authorization Method", + "type": "object", + "oneOf": [ + { + "type": "object", + "title": "oAuth2.0", + "required": [ + "client_id", + "client_secret", + "access_token" + ], + "properties": { + "credentials": { + "type": "string", + "const": "oAuth2.0" + }, + "client_id": { + "type": "string", + "description": "The API ID of the Gitlab developer application.", + "airbyte_secret": true + }, + "client_secret": { + "type": "string", + "description": "The API Secret the Gitlab developer application.", + "airbyte_secret": true + }, + "access_token": { + "type": "string", + "description": "Access Token for making authenticated requests.", + "airbyte_secret": true + }, + "refresh_token": { + "type": "string", + "description": "The key to refresh the expired access_token.", + "airbyte_secret": true + } + } + }, + { + "title": "Access Token", + "type": "object", + "required": [ + "access_token" + ], + "properties": { + "credentials": { + "type": "string", + "const": "access_token" + }, + "access_token": { + "type": "string", + "title": "Access Token", + "description": "The token value ganerated using Auth Code", + "airbyte_secret": true + } + } + } + ] } } + }, + "authSpecification": { + "auth_type": "oauth2.0", + "oauth2Specification": { + "rootObject": [ + "credentials", + "0" + ], + "oauthFlowInitParameters": [ + [ + "client_id" + ], + [ + "client_secret" + ] + ], + "oauthFlowOutputParameters": [ + [ + "access_token" + ], + [ + "refresh_token" + ] + ] + } } -} +} \ No newline at end of file From c6087aa9402ad6e98ff51e3b1097b82cce31692d Mon Sep 17 00:00:00 2001 From: antixar Date: Wed, 10 Nov 2021 11:06:07 +0200 Subject: [PATCH 27/36] add java files --- .../oauth/OAuthImplementationFactory.java | 2 + .../oauth/flows/LinkedinAdsOAuthFlow.java | 69 +++++++++++++++++++ 2 files changed, 71 insertions(+) create mode 100644 airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java index 7c613dfcd646a..36f9cbda6c3cb 100644 --- 
a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java @@ -12,6 +12,7 @@ import io.airbyte.oauth.flows.GithubOAuthFlow; import io.airbyte.oauth.flows.HubspotOAuthFlow; import io.airbyte.oauth.flows.IntercomOAuthFlow; +//import io.airbyte.oauth.flows.LinkedinAdsOAuthFlow; import io.airbyte.oauth.flows.SalesforceOAuthFlow; import io.airbyte.oauth.flows.SlackOAuthFlow; import io.airbyte.oauth.flows.SurveymonkeyOAuthFlow; @@ -44,6 +45,7 @@ public OAuthImplementationFactory(final ConfigRepository configRepository, final .put("airbyte/source-hubspot", new HubspotOAuthFlow(configRepository, httpClient)) .put("airbyte/source-intercom", new IntercomOAuthFlow(configRepository, httpClient)) .put("airbyte/source-instagram", new InstagramOAuthFlow(configRepository, httpClient)) + // .put("airbyte/source-linkedin-ads", new LinkedinAdsOAuthFlow(configRepository, httpClient)) .put("airbyte/source-salesforce", new SalesforceOAuthFlow(configRepository, httpClient)) .put("airbyte/source-slack", new SlackOAuthFlow(configRepository, httpClient)) .put("airbyte/source-surveymonkey", new SurveymonkeyOAuthFlow(configRepository, httpClient)) diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java new file mode 100644 index 0000000000000..9a24381c9e5ff --- /dev/null +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.oauth.flows; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Preconditions; +import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.oauth.BaseOAuth2Flow; +import java.io.IOException; +import java.net.URISyntaxException; +import java.net.http.HttpClient; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.function.Supplier; +import org.apache.http.client.utils.URIBuilder; + +public class LinkedinAdsOAuthFlow extends BaseOAuth2Flow { + + private static final String AUTHORIZE_URL = "https://www.linkedin.com/oauth/v2/authorization"; + private static final String ACCESS_TOKEN_URL = "https://www.linkedin.com/oauth/v2/accessToken"; + private static final String SCOPES = "r_ads_reporting r_ads"; + + public LinkedinAdsOAuthFlow(ConfigRepository configRepository, HttpClient httpClient) { + super(configRepository, httpClient); + } + + @VisibleForTesting + public LinkedinAdsOAuthFlow(ConfigRepository configRepository, final HttpClient httpClient, Supplier<UUID> stateSupplier) { + super(configRepository, httpClient, stateSupplier); + } + + @Override + protected String formatConsentUrl(UUID definitionId, String clientId, String redirectUrl) throws IOException { + try { + return new URIBuilder(AUTHORIZE_URL) + .addParameter("client_id", clientId) + .addParameter("redirect_uri", redirectUrl) + .addParameter("response_type", "code") + .addParameter("scope", SCOPES) + .addParameter("state", getState()) + .build().toString(); + } catch (URISyntaxException e) { + throw new IOException("Failed to format Consent URL for OAuth flow", e); + } + } + + @Override + protected String getAccessTokenUrl() { + return ACCESS_TOKEN_URL; + } + + @Override + protected Map<String, Object> extractOAuthOutput(final JsonNode data, final String accessTokenUrl) { + // 
LinkedIn returns the access token in the JSON body of the token response; + // only the "access_token" field is extracted here. + Preconditions.checkArgument(data.has("access_token"), "Missing 'access_token' in query params from %s", ACCESS_TOKEN_URL); + return Map.of("access_token", data.get("access_token").asText()); + } + + @Override + protected List<String> getDefaultOAuthOutputPath() { + return List.of(); + } + +} From 7b9807d9a33ede3a3a2bc5764cc6cfec6ffa9ff9 Mon Sep 17 00:00:00 2001 From: antixar Date: Wed, 10 Nov 2021 11:07:24 +0200 Subject: [PATCH 28/36] add java files --- docs/connector-development/README.md | 15 ++-- tools/bin/ci_credentials.sh | 129 +-------------------------- 2 files changed, 7 insertions(+), 137 deletions(-) diff --git a/docs/connector-development/README.md b/docs/connector-development/README.md index 6e8e01034413e..ebf9705e57843 100644 --- a/docs/connector-development/README.md +++ b/docs/connector-development/README.md @@ -130,15 +130,10 @@ Once you've finished iterating on the changes to a connector as specified in its ## Using credentials in CI In order to run integration tests in CI, you'll often need to inject credentials into CI. There are a few steps for doing this: -1. **Place the credentials into Google Secret Manager(GSM)**: Airbyte uses a project 'Google Secret Manager' service as the source of truth for all secrets. Place the credentials **exactly as they should be used by the connector** into a GSM secret i.e.: it should basically be a copy paste of the `config.json` passed into a connector via the `--config` flag. We use the following naming pattern: `SECRET_<CONNECTOR_NAME>_CREDS` e.g: `SECRET_SOURCE-S3_CREDS` or `SECRET_DESTINATION-SNOWFLAKE_CREDS`. Access to the GSM storage is limited. Each developer should have the role `Development_CI_Secrets` of the project `dataline-integration-testing`. -2. **Add the GSM secret's labels**: - * `connector` (required) -- unique connector's name or set of connectors' names with '_' as delimiter i.e.: `connector=source-s3`, `connector=destination-snowflake` - * `filename` (optional) -- custom target secret file. Unfortunately Google doesn't use '.' into labels' values and so Airbyte CI scripts will add '.json' to the end automatically. By default secrets will be saved to `./secrets/config.json` i.e: `filename=config_auth` => `secrets/config_auth.json` -3. That should be it. - -#### How to migrate to the new secrets' logic: -1. Create all necessary secrets how it is explained above. -2. Remove all lines with old connector's GutHub secrets into this file: tools/bin/ci_credentials.sh -3. Remove all old secrets from Githubthe secret from env variables to the connector directory**: edit `tools/bin/ci_credentials.sh` to write the secret into the `secrets/` directory of the relevant connector. + +1. **Place the credentials into Lastpass**: Airbyte uses a shared Lastpass account as the source of truth for all secrets. Place the credentials **exactly as they should be used by the connector** into a secure note i.e.: it should basically be a copy-paste of the `config.json` passed into a connector via the `--config` flag. We use the following naming pattern: `<connector name> creds` e.g.: `source google adwords creds` or `destination snowflake creds`. +2. **Add the credentials to Github Secrets**: To inject credentials into a CI workflow, the first step is to add them to Github Secrets, specifically within the ["more-secrets" environment](https://github.com/airbytehq/airbyte/settings/environments/276695501/edit). 
Admin access to the Airbyte repo is required to do this. All Airbyte engineers have admin access and should be able to do this themselves. External contributors or contractors will need to request this from their team lead or project manager who should have admin access. Follow the same naming pattern as all the other secrets e.g: if you are placing credentials for source google adwords, name the secret `SOURCE_GOOGLE_ADWORDS_CREDS`. After doing this step, the secret will be available in the relevant Github workflows using the workflow secrets syntax. +3. **Inject the credentials into test and publish CI workflows**: edit the files `.github/workflows/publish-command.yml` and `.github/workflows/test-command.yml` to inject the secret into the CI run. This will make these secrets available to the `/test` and `/publish` commands. +4. **During CI, write the secret from env variables to the connector directory**: edit `tools/bin/ci_credentials.sh` to write the secret into the `secrets/` directory of the relevant connector. 5. That should be it. diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh index eab7e8c62e825..95741a3bf8fe9 100755 --- a/tools/bin/ci_credentials.sh +++ b/tools/bin/ci_credentials.sh @@ -1,50 +1,16 @@ #!/usr/bin/env bash . tools/lib/lib.sh -. tools/lib/gcp-token.sh set -e -# all secrets will be loaded if the second argument is not present -CONNECTOR_FULLNAME=${1:-all} -CONNECTOR_NAME=`echo ${CONNECTOR_FULLNAME} | rev | cut -d'/' -f1 | rev` - -GSM_SCOPES="https://www.googleapis.com/auth/cloud-platform" - -declare -A SECRET_MAP - - function write_standard_creds() { local connector_name=$1 local creds=$2 local cred_filename=${3:-config.json} - local source_name=${4:-github} [ -z "$connector_name" ] && error "Empty connector name" - [ -z "$creds" ] && error "!!!!!Creds not set for the connector $connector_name from ${source_name}" - - if [[ $CONNECTOR_NAME != "all" && ${connector_name} != ${CONNECTOR_NAME} ]]; then - return 0 - fi - local key="${connector_name}#${cred_filename}" - [[ -z "${creds}" ]] && error "Empty credential for the connector '${key} from ${source_name}" - - if [ -v SECRET_MAP[${key}] ]; then - echo "The connector '${key}' was added before" - return 0 - fi - echo "register the secret ${key} from ${source_name}" - SECRET_MAP[${key}]="${creds}" - return 0 -} - -function write_secret_to_disk() { - local connector_name=$1 - local cred_filename=$2 - local creds=$3 - if jq -e . 
>/dev/null 2>&1 <<<${creds}; then - error "Failed to parse JSON for '${connector_name}' => ${cred_filename}" - fi + [ -z "$creds" ] && error "Creds not set for $connector_name" if [ "$connector_name" = "base-normalization" ]; then local secrets_dir="airbyte-integrations/bases/${connector_name}/secrets" @@ -52,96 +18,9 @@ function write_secret_to_disk() { local secrets_dir="airbyte-integrations/connectors/${connector_name}/secrets" fi mkdir -p "$secrets_dir" - echo "Saved a secret => ${secrets_dir}/${cred_filename}" echo "$creds" > "${secrets_dir}/${cred_filename}" } -function write_all_secrets() { - for key in "${!SECRET_MAP[@]}"; do - local connector_name=$(echo ${key} | cut -d'#' -f1) - local cred_filename=$(echo ${key} | cut -d'#' -f2) - local creds=${SECRET_MAP[${key}]} - write_secret_to_disk ${connector_name} ${cred_filename} "${creds}" - - done - return 0 -} - - -function export_github_secrets(){ - local pairs=`echo ${GITHUB_PROVIDED_SECRETS_JSON} | jq -c 'keys[] as $k | {"name": $k, "value": .[$k]} | @base64'` - while read row; do - pair=$(echo "${row}" | tr -d '"' | base64 -d) - local key=$(echo ${pair} | jq -r .name) - local value=$(echo ${pair} | jq -r .value) - if [[ "$key" == *"_CREDS"* ]]; then - declare -gxr "${key}"="$(echo ${value})" - fi - done <<< ${pairs} - unset GITHUB_PROVIDED_SECRETS_JSON -} - -function export_gsm_secrets(){ - local config_file=`mktemp` - echo "${GCP_GSM_CREDENTIALS}" > ${config_file} - local access_token=$(get_gcp_access_token "${config_file}" "${GSM_SCOPES}") - local project_id=$(parse_project_id "${config_file}") - rm ${config_file} - - # docs: https://cloud.google.com/secret-manager/docs/filtering#api - local filter="name:SECRET_" - [[ ${CONNECTOR_NAME} != "all" ]] && filter="${filter} AND labels.connector:${CONNECTOR_NAME}" - local uri="https://secretmanager.googleapis.com/v1/projects/${project_id}/secrets" - local next_token='' - while true; do - local data=$(curl -s --get --fail "${uri}" \ - --data-urlencode "filter=${filter}" \ - --data-urlencode "pageToken=${next_token}" \ - --header "authorization: Bearer ${access_token}" \ - --header "content-type: application/json" \ - --header "x-goog-user-project: ${project_id}") - [[ -z ${data} ]] && error "Can't load secret for connector ${CONNECTOR_NAME}" - # GSM returns an empty JSON object if secrets are not found. - # It breaks JSON parsing by the 'jq' utility. The simplest fix is response normalization - [[ ${data} == "{}" ]] && data='{"secrets": []}' - - for row in $(echo "${data}" | jq -r '.secrets[] | @base64'); do - local secret_info=$(echo ${row} | base64 --decode) - local secret_name=$(echo ${secret_info}| jq -r .name) - local label_filename=$(echo ${secret_info}| jq -r '.labels.filename // "config"') - local label_connectors=$(echo ${secret_info}| jq -r '.labels.connector // ""') - - # skip secrets without the label "connector" - [[ -z ${label_connectors} ]] && continue - if [[ "$label_connectors" != *"${CONNECTOR_NAME}"* ]]; then - echo "Not found ${CONNECTOR_NAME} info into the label 'connector' of the secret ${secret_name}" - continue - fi - - # all secret file names should be finished with ".json" - # but '.' 
cant be used in google, so we append it - local filename="${label_filename}.json" - echo "found the Google secret of ${label_connectors}: ${secret_name} => ${filename}" - local secret_uri="https://secretmanager.googleapis.com/v1/${secret_name}/versions/latest:access" - local secret_data=$(curl -s --get --fail "${secret_uri}" \ - --header "authorization: Bearer ${access_token}" \ - --header "content-type: application/json" \ - --header "x-goog-user-project: ${project_id}") - [[ -z ${secret_data} ]] && error "Can't load secrets' list" - - secret_data=$(echo ${secret_data} | jq -r '.payload.data // ""' | base64 -d) - write_standard_creds "${CONNECTOR_NAME}" "${secret_data}" "${filename}" "gsm" - done - next_token=`echo ${data} | jq -r '.nextPageToken // ""'` - [[ -z ${next_token} ]] && break - done - return 0 -} - -export_gsm_secrets -export_github_secrets - - # Please maintain this organisation and alphabetise. write_standard_creds destination-bigquery "$BIGQUERY_INTEGRATION_TEST_CREDS" "credentials.json" write_standard_creds destination-bigquery-denormalized "$BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS" "credentials.json" @@ -197,7 +76,7 @@ write_standard_creds source-file "$AZURE_STORAGE_INTEGRATION_TEST_CREDS" "azblob write_standard_creds source-file "$FILE_SECURE_HTTPS_TEST_CREDS" write_standard_creds source-file-secure "$FILE_SECURE_HTTPS_TEST_CREDS" write_standard_creds source-freshdesk "$FRESHDESK_TEST_CREDS" -write_standard_creds source-freshsales "$SOURCE_FRESHSALES_TEST_CREDS" +write_standard_creds source-freshsales "$SOURCE_FRESHSALES_TEST_CREDS" write_standard_creds source-freshservice "$SOURCE_FRESHSERVICE_TEST_CREDS" write_standard_creds source-facebook-marketing "$FACEBOOK_MARKETING_TEST_INTEGRATION_CREDS" write_standard_creds source-facebook-pages "$FACEBOOK_PAGES_INTEGRATION_TEST_CREDS" @@ -289,7 +168,3 @@ write_standard_creds source-zendesk-support "$ZENDESK_SUPPORT_OAUTH_TEST_CREDS" write_standard_creds source-zendesk-talk "$ZENDESK_TALK_TEST_CREDS" write_standard_creds source-zoom-singer "$ZOOM_INTEGRATION_TEST_CREDS" write_standard_creds source-zuora "$SOURCE_ZUORA_TEST_CREDS" - -write_all_secrets -exit $? 
- From fd96112fef189d8b231bce76cac7924b0c61255e Mon Sep 17 00:00:00 2001 From: antixar Date: Thu, 11 Nov 2021 02:32:35 +0200 Subject: [PATCH 29/36] add oauth2 logic --- .../acceptance-test-config.yml | 16 ++++---- .../source_linkedin_ads/source.py | 17 ++++++-- .../source_linkedin_ads/spec.json | 10 +---- .../oauth/OAuthImplementationFactory.java | 4 +- .../oauth/flows/LinkedinAdsOAuthFlow.java | 41 +++++++++++++++---- 5 files changed, 60 insertions(+), 28 deletions(-) diff --git a/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-config.yml b/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-config.yml index d65e7ff230364..22e1d08376147 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-config.yml @@ -5,21 +5,23 @@ tests: spec: - spec_path: "source_linkedin_ads/spec.json" connection: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "secrets/config_token.json" + # - config_path: "secrets/config.json" + # status: "succeed" + # - config_path: "secrets/config_token.json" + # status: "succeed" + - config_path: "secrets/config_oauth.json" status: "succeed" - config_path: "integration_tests/invalid_config.json" status: "failed" discovery: - - config_path: "secrets/config.json" + - config_path: "secrets/config_oauth.json" basic_read: - - config_path: "secrets/config.json" + - config_path: "secrets/config_oauth.json" configured_catalog_path: "integration_tests/configured_catalog.json" incremental: - - config_path: "secrets/config.json" + - config_path: "secrets/config_oauth.json" configured_catalog_path: "integration_tests/configured_catalog.json" future_state_path: "integration_tests/abnormal_state.json" full_refresh: - - config_path: "secrets/config.json" + - config_path: "secrets/config_oauth.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py index 884de1702b206..df1afeafde887 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py +++ b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py @@ -13,6 +13,7 @@ from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http import HttpStream from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator +from airbyte_cdk.sources.streams.http.auth import Oauth2Authenticator from .analytics import make_analytics_slices, merge_chunks, update_analytics_params from .utils import get_parent_stream_values, transform_data @@ -305,15 +306,25 @@ class SourceLinkedinAds(AbstractSource): @classmethod def get_authenticator(cls, config: Mapping[str, Any])-> TokenAuthenticator: - """Validate input parameters and generate a necessary Authentication object""" + """ + Validate input parameters and generate a necessary Authentication object + This connectors support 2 auth methods: + 1) direct access token with TTL = 2 months + 2) refresh token (TTL = 1 year) which can be converted to access tokens + Every new refresh revokes all previous access tokens q + """ auth_method = config.get("credentials", {}).get("credentials") if not auth_method or auth_method == "access_token": # support of backward compatibility with old exists configs access_token = config["credentials"]["access_token"] if auth_method 
else config["access_token"] return TokenAuthenticator(token=access_token) elif auth_method == "oAuth2.0": - raise Exception("aaaaaaaa %s" % config) - + return Oauth2Authenticator( + token_refresh_endpoint="https://www.linkedin.com/oauth/v2/accessToken", + client_id=config["credentials"]["client_id"], + client_secret=config["credentials"]["client_secret"], + refresh_token=config["credentials"]["refresh_token"] + ) raise Exception("incorrect input parameters") def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, any]: diff --git a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json index c5057c6b6f5d4..a984ab0c2242e 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json +++ b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json @@ -37,7 +37,7 @@ "required": [ "client_id", "client_secret", - "access_token" + "refresh_token" ], "properties": { "credentials": { @@ -54,11 +54,6 @@ "description": "The API Secret the Gitlab developer application.", "airbyte_secret": true }, - "access_token": { - "type": "string", - "description": "Access Token for making authenticated requests.", - "airbyte_secret": true - }, "refresh_token": { "type": "string", "description": "The key to refresh the expired access_token.", @@ -105,9 +100,6 @@ ] ], "oauthFlowOutputParameters": [ - [ - "access_token" - ], [ "refresh_token" ] diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java index 36f9cbda6c3cb..9ce17b779ec59 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java @@ -12,7 +12,7 @@ import io.airbyte.oauth.flows.GithubOAuthFlow; import io.airbyte.oauth.flows.HubspotOAuthFlow; import io.airbyte.oauth.flows.IntercomOAuthFlow; -//import io.airbyte.oauth.flows.LinkedinAdsOAuthFlow; +import io.airbyte.oauth.flows.LinkedinAdsOAuthFlow; import io.airbyte.oauth.flows.SalesforceOAuthFlow; import io.airbyte.oauth.flows.SlackOAuthFlow; import io.airbyte.oauth.flows.SurveymonkeyOAuthFlow; @@ -45,7 +45,7 @@ public OAuthImplementationFactory(final ConfigRepository configRepository, final .put("airbyte/source-hubspot", new HubspotOAuthFlow(configRepository, httpClient)) .put("airbyte/source-intercom", new IntercomOAuthFlow(configRepository, httpClient)) .put("airbyte/source-instagram", new InstagramOAuthFlow(configRepository, httpClient)) - // .put("airbyte/source-linkedin-ads", new LinkedinAdsOAuthFlow(configRepository, httpClient)) + .put("airbyte/source-linkedin-ads", new LinkedinAdsOAuthFlow(configRepository, httpClient)) .put("airbyte/source-salesforce", new SalesforceOAuthFlow(configRepository, httpClient)) .put("airbyte/source-slack", new SlackOAuthFlow(configRepository, httpClient)) .put("airbyte/source-surveymonkey", new SurveymonkeyOAuthFlow(configRepository, httpClient)) diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java index 9a24381c9e5ff..5fb58ad0dbf77 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java @@ -7,11 +7,16 @@ import 
com.fasterxml.jackson.databind.JsonNode;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import io.airbyte.commons.json.Jsons;
 import io.airbyte.config.persistence.ConfigRepository;
 import io.airbyte.oauth.BaseOAuth2Flow;
 import java.io.IOException;
+import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
 import java.util.List;
 import java.util.Map;
 import java.util.UUID;
@@ -22,7 +27,7 @@ public class LinkedinAdsOAuthFlow extends BaseOAuth2Flow {

   private static final String AUTHORIZE_URL = "https://www.linkedin.com/oauth/v2/authorization";
   private static final String ACCESS_TOKEN_URL = "https://www.linkedin.com/oauth/v2/accessToken";
-  private static final String SCOPES = "r_ads_reporting r_ads";
+  private static final String SCOPES = "r_ads_reporting r_ads r_basicprofile";

   public LinkedinAdsOAuthFlow(ConfigRepository configRepository, HttpClient httpClient) {
     super(configRepository, httpClient);
@@ -53,17 +58,39 @@ protected String getAccessTokenUrl() {
     return ACCESS_TOKEN_URL;
   }

+  @Override
+  protected Map<String, String> getAccessTokenQueryParameters(final String clientId,
+                                                              final String clientSecret,
+                                                              final String authCode,
+                                                              final String redirectUrl) {
+    return ImmutableMap.<String, String>builder()
+        .putAll(super.getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl))
+        .put("grant_type", "authorization_code")
+        .build();
+  }
+
   @Override
   protected Map<String, Object> extractOAuthOutput(final JsonNode data, final String accessTokenUrl) {
     // Intercom does not have refresh token but calls it "long lived access token" instead:
     // see https://developers.intercom.com/building-apps/docs/setting-up-oauth
-    Preconditions.checkArgument(data.has("access_token"), "Missing 'access_token' in query params from %s", ACCESS_TOKEN_URL);
-    return Map.of("access_token", data.get("access_token").asText());
+    Preconditions.checkArgument(data.has("refresh_token"),
+        "Missing 'refresh_token' in query params from %s", ACCESS_TOKEN_URL);
+    return Map.of("refresh_token", data.get("refresh_token").asText());
   }

-  @Override
-  protected List<String> getDefaultOAuthOutputPath() {
-    return List.of();
-  }
+// protected Map<String, Object> completeOAuthFlow(final String clientId,
+//                                                 final String clientSecret,
+//                                                 final String authCode,
+//                                                 final String redirectUrl,
+//                                                 final JsonNode oAuthParamConfig)
+//     throws IOException {
+//   final var result = super.completeOAuthFlow(clientId, clientSecret, authCode, redirectUrl, oAuthParamConfig);
+//   // Refresh token can be used throughout 1 year but for this we need to forward client_id/client_secret additionally
+//   return Map.of(
+//       "client_secret", clientSecret,
+//       "client_id", clientId,
+//       "refresh_token", (String) result.get("refresh_token"));
+// }

 }

From 5bed57dc2bb9d8c68c077d7d033bea76feca5e9a Mon Sep 17 00:00:00 2001
From: antixar
Date: Thu, 11 Nov 2021 15:02:56 +0200
Subject: [PATCH 30/36] update doc and version

---
 .../resources/seed/source_definitions.yaml    |  2 +-
 .../src/main/resources/seed/source_specs.yaml |  2 +-
 .../connectors/source-linkedin-ads/Dockerfile |  2 +-
 .../source_linkedin_ads/source.py             | 17 ++++----
 .../source_linkedin_ads/spec.json             | 40 ++++---------------
 .../oauth/flows/LinkedinAdsOAuthFlow.java     | 37 ++---------------
 docs/integrations/sources/linkedin-ads.md     |  1 +
 tools/bin/ci_credentials.sh                   |  1 -
 8 files changed, 23 insertions(+), 79 deletions(-)

diff --git 
a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 9108faaf17169..fb5b01a4b96cb 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -299,7 +299,7 @@ - name: LinkedIn Ads sourceDefinitionId: 137ece28-5434-455c-8f34-69dc3782f451 dockerRepository: airbyte/source-linkedin-ads - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/sources/linkedin-ads sourceType: api - name: Looker diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index ed6768f7e5a20..83ab7c529c31c 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -3037,7 +3037,7 @@ - - "client_secret" - - "refresh_token" oauthFlowOutputParameters: [] -- dockerImage: "airbyte/source-linkedin-ads:0.1.1" +- dockerImage: "airbyte/source-linkedin-ads:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/linkedin-ads" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-linkedin-ads/Dockerfile b/airbyte-integrations/connectors/source-linkedin-ads/Dockerfile index 31a00a2003513..70af90b7cf9bb 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/Dockerfile +++ b/airbyte-integrations/connectors/source-linkedin-ads/Dockerfile @@ -33,5 +33,5 @@ COPY source_linkedin_ads ./source_linkedin_ads ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/source-linkedin-ads diff --git a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py index df1afeafde887..01e37f1b510c4 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py +++ b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py @@ -12,8 +12,7 @@ from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http import HttpStream -from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator -from airbyte_cdk.sources.streams.http.auth import Oauth2Authenticator +from airbyte_cdk.sources.streams.http.auth import Oauth2Authenticator, TokenAuthenticator from .analytics import make_analytics_slices, merge_chunks, update_analytics_params from .utils import get_parent_stream_values, transform_data @@ -305,13 +304,13 @@ class SourceLinkedinAds(AbstractSource): """ @classmethod - def get_authenticator(cls, config: Mapping[str, Any])-> TokenAuthenticator: + def get_authenticator(cls, config: Mapping[str, Any]) -> TokenAuthenticator: """ - Validate input parameters and generate a necessary Authentication object - This connectors support 2 auth methods: - 1) direct access token with TTL = 2 months - 2) refresh token (TTL = 1 year) which can be converted to access tokens - Every new refresh revokes all previous access tokens q + Validate input parameters and generate a necessary Authentication object + This connectors support 2 auth methods: + 1) direct access token with TTL = 2 months + 2) refresh token (TTL = 1 year) which 
can be converted to access tokens + Every new refresh revokes all previous access tokens q """ auth_method = config.get("credentials", {}).get("credentials") if not auth_method or auth_method == "access_token": @@ -323,7 +322,7 @@ def get_authenticator(cls, config: Mapping[str, Any])-> TokenAuthenticator: token_refresh_endpoint="https://www.linkedin.com/oauth/v2/accessToken", client_id=config["credentials"]["client_id"], client_secret=config["credentials"]["client_secret"], - refresh_token=config["credentials"]["refresh_token"] + refresh_token=config["credentials"]["refresh_token"], ) raise Exception("incorrect input parameters") diff --git a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json index a984ab0c2242e..ba0690f5e0bac 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json +++ b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json @@ -4,9 +4,7 @@ "$schema": "http://json-schema.org/draft-07/schema#", "title": "Linkedin Ads Spec", "type": "object", - "required": [ - "start_date" - ], + "required": ["start_date"], "additionalProperties": true, "properties": { "start_date": { @@ -14,9 +12,7 @@ "title": "Start Date", "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$", "description": "Date in the format 2020-09-17. Any data before this date will not be replicated.", - "examples": [ - "2021-05-17" - ] + "examples": ["2021-05-17"] }, "account_ids": { "title": "Account IDs", @@ -34,11 +30,7 @@ { "type": "object", "title": "oAuth2.0", - "required": [ - "client_id", - "client_secret", - "refresh_token" - ], + "required": ["client_id", "client_secret", "refresh_token"], "properties": { "credentials": { "type": "string", @@ -64,9 +56,7 @@ { "title": "Access Token", "type": "object", - "required": [ - "access_token" - ], + "required": ["access_token"], "properties": { "credentials": { "type": "string", @@ -87,23 +77,9 @@ "authSpecification": { "auth_type": "oauth2.0", "oauth2Specification": { - "rootObject": [ - "credentials", - "0" - ], - "oauthFlowInitParameters": [ - [ - "client_id" - ], - [ - "client_secret" - ] - ], - "oauthFlowOutputParameters": [ - [ - "refresh_token" - ] - ] + "rootObject": ["credentials", "0"], + "oauthFlowInitParameters": [["client_id"], ["client_secret"]], + "oauthFlowOutputParameters": [["refresh_token"]] } } -} \ No newline at end of file +} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java index 5fb58ad0dbf77..f158b046f975a 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java @@ -4,20 +4,13 @@ package io.airbyte.oauth.flows; -import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.oauth.BaseOAuth2Flow; import java.io.IOException; -import java.net.URI; import java.net.URISyntaxException; import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.util.List; import java.util.Map; import java.util.UUID; import java.util.function.Supplier; @@ -64,33 +57,9 @@ 
protected Map getAccessTokenQueryParameters(final String clientI final String authCode, final String redirectUrl) { return ImmutableMap.builder() - .putAll(super.getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)) - .put("grant_type", "authorization_code") - .build(); + .putAll(super.getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)) + .put("grant_type", "authorization_code") + .build(); } - @Override - protected Map extractOAuthOutput(final JsonNode data, final String accessTokenUrl) { - // Intercom does not have refresh token but calls it "long lived access token" instead: - // see https://developers.intercom.com/building-apps/docs/setting-up-oauth - Preconditions.checkArgument(data.has("refresh_token"), - "Missing 'refresh_token' in query params from %s", ACCESS_TOKEN_URL); - return Map.of("refresh_token", data.get("refresh_token").asText()); - } - -// protected Map completeOAuthFlow(final String clientId, -// final String clientSecret, -// final String authCode, -// final String redirectUrl, -// final JsonNode oAuthParamConfig) -// throws IOException { -// final var result = super.completeOAuthFlow(clientId, clientSecret, authCode, redirectUrl, oAuthParamConfig); -// // Refresh token can be used throughout 1 year but for this we need to forward client_id/client_secret additionally -// return Map.of( -// "client_secret", clientSecret, -// "client_id", clientId, -// "refresh_token", (String)result.get("refresh_token") -// ); -// } - } diff --git a/docs/integrations/sources/linkedin-ads.md b/docs/integrations/sources/linkedin-ads.md index 9039a1f2be19b..b47b362f68f9b 100644 --- a/docs/integrations/sources/linkedin-ads.md +++ b/docs/integrations/sources/linkedin-ads.md @@ -139,6 +139,7 @@ The complete set of prmissions is: | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.2 | 2021-11-11 | [7839](https://github.com/airbytehq/airbyte/pull/7839) | Added oauth support | | 0.1.1 | 2021-10-02 | [6610](https://github.com/airbytehq/airbyte/pull/6610) | Fix for `Campaigns/targetingCriteria` transformation, coerced `Creatives/variables/values` to string by default | | 0.1.0 | 2021-09-05 | [5285](https://github.com/airbytehq/airbyte/pull/5285) | Initial release of Native LinkedIn Ads connector for Airbyte | diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh index 01822588c87cf..e451f492e2e69 100755 --- a/tools/bin/ci_credentials.sh +++ b/tools/bin/ci_credentials.sh @@ -235,7 +235,6 @@ read_secrets source-klaviyo "$KLAVIYO_TEST_CREDS" read_secrets source-lemlist "$SOURCE_LEMLIST_TEST_CREDS" read_secrets source-lever-hiring "$LEVER_HIRING_INTEGRATION_TEST_CREDS" read_secrets source-looker "$LOOKER_INTEGRATION_TEST_CREDS" -read_secrets source-linkedin-ads "$SOURCE_LINKEDIN_ADS_TEST_CREDS" read_secrets source-mailchimp "$MAILCHIMP_TEST_CREDS" read_secrets source-marketo "$SOURCE_MARKETO_TEST_CREDS" read_secrets source-microsoft-teams "$MICROSOFT_TEAMS_TEST_CREDS" From 3acbbc6689179095b333879e4e1953fccc648746 Mon Sep 17 00:00:00 2001 From: antixar Date: Fri, 12 Nov 2021 20:05:19 +0200 Subject: [PATCH 31/36] add tests --- .../oauth/flows/LinkedinAdsOAuthFlowTest.java | 80 +++++++++++++++++++ 1 file changed, 80 insertions(+) create mode 100644 airbyte-oauth/src/test/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlowTest.java diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlowTest.java new file 
mode 100644
index 0000000000000..3dc44640c669b
--- /dev/null
+++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlowTest.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.oauth.flows;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import com.google.common.collect.ImmutableMap;
+import io.airbyte.commons.json.Jsons;
+import io.airbyte.config.SourceOAuthParameter;
+import io.airbyte.config.persistence.ConfigNotFoundException;
+import io.airbyte.config.persistence.ConfigRepository;
+import io.airbyte.validation.json.JsonValidationException;
+import java.io.IOException;
+import java.net.http.HttpClient;
+import java.net.http.HttpResponse;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+public class LinkedinAdsOAuthFlowTest {
+
+  private UUID workspaceId;
+  private UUID definitionId;
+  private LinkedinAdsOAuthFlow linkedinAdsOAuthFlow;
+  private HttpClient httpClient;
+
+  private static final String REDIRECT_URL = "https://airbyte.io";
+
+  private static String getConstantState() {
+    return "state";
+  }
+
+  @BeforeEach
+  public void setup() throws IOException, JsonValidationException {
+    workspaceId = UUID.randomUUID();
+    definitionId = UUID.randomUUID();
+    ConfigRepository configRepository = mock(ConfigRepository.class);
+    httpClient = mock(HttpClient.class);
+    when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter()
+        .withOauthParameterId(UUID.randomUUID())
+        .withSourceDefinitionId(definitionId)
+        .withWorkspaceId(workspaceId)
+        .withConfiguration(Jsons.jsonNode(Map.of("credentials", ImmutableMap.<String, String>builder()
+            .put("client_id", "test_client_id")
+            .put("client_secret", "test_client_secret")
+            .build())))));
+    linkedinAdsOAuthFlow = new LinkedinAdsOAuthFlow(configRepository, httpClient, LinkedinAdsOAuthFlowTest::getConstantState);
+
+  }
+
+  @Test
+  public void testGetSourceConcentUrl() throws IOException, ConfigNotFoundException {
+    final String concentUrl =
+        linkedinAdsOAuthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL);
+    assertEquals(concentUrl,
+        "https://www.linkedin.com/oauth/v2/authorization?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&scope=r_ads_reporting+r_ads+r_basicprofile&state=state");
+  }
+
+  @Test
+  public void testCompleteSourceOAuth() throws IOException, InterruptedException, ConfigNotFoundException {
+
+    Map<String, String> returnedCredentials = Map.of("refresh_token", "refresh_token_response");
+    final HttpResponse<String> response = mock(HttpResponse.class);
+    when(response.body()).thenReturn(Jsons.serialize(returnedCredentials));
+    when(httpClient.send(any(), any())).thenReturn(response);
+    final Map<String, Object> queryParams = Map.of("code", "test_code");
+    final Map<String, Object> actualQueryParams =
+        linkedinAdsOAuthFlow.completeSourceOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL);
+
+    assertEquals(Jsons.serialize(Map.of("credentials", returnedCredentials)), Jsons.serialize(actualQueryParams));
+  }
+
+}

From a2e47ec2dbdc7a36ff2dcd65990208739512c13f Mon Sep 17 00:00:00 2001
From: antixar
Date: Tue, 16 Nov 2021 19:30:42 +0200
Subject: [PATCH 32/36] correction of spec

---
 .../source-linkedin-ads/source_linkedin_ads/source.py | 2 +-
 .../source-linkedin-ads/source_linkedin_ads/spec.json | 
4 ++-- .../java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py index f90b8fecabe37..d3d402204522f 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py +++ b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py @@ -312,7 +312,7 @@ def get_authenticator(cls, config: Mapping[str, Any]) -> TokenAuthenticator: 2) refresh token (TTL = 1 year) which can be converted to access tokens Every new refresh revokes all previous access tokens q """ - auth_method = config.get("credentials", {}).get("credentials") + auth_method = config.get("credentials", {}).get("auth_method") if not auth_method or auth_method == "access_token": # support of backward compatibility with old exists configs access_token = config["credentials"]["access_token"] if auth_method else config["access_token"] diff --git a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json index ba0690f5e0bac..c5b92a8af6500 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json +++ b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/spec.json @@ -32,7 +32,7 @@ "title": "oAuth2.0", "required": ["client_id", "client_secret", "refresh_token"], "properties": { - "credentials": { + "auth_method": { "type": "string", "const": "oAuth2.0" }, @@ -58,7 +58,7 @@ "type": "object", "required": ["access_token"], "properties": { - "credentials": { + "auth_method": { "type": "string", "const": "access_token" }, diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java index e8beae17d6fe8..4167100d28702 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java @@ -22,7 +22,7 @@ public class LinkedinAdsOAuthFlow extends BaseOAuth2Flow { private static final String AUTHORIZE_URL = "https://www.linkedin.com/oauth/v2/authorization"; private static final String ACCESS_TOKEN_URL = "https://www.linkedin.com/oauth/v2/accessToken"; - private static final String SCOPES = "r_ads_reporting r_ads r_basicprofile r_organization_social"; + private static final String SCOPES = "r_ads_reporting r_emailaddress r_liteprofile r_ads r_basicprofile r_organization_social"; public LinkedinAdsOAuthFlow(ConfigRepository configRepository, HttpClient httpClient) { super(configRepository, httpClient); From ad85ca2d568b84a8b70b24acc54518b9a076efef Mon Sep 17 00:00:00 2001 From: antixar Date: Thu, 18 Nov 2021 15:24:14 +0200 Subject: [PATCH 33/36] update tests --- .../source_linkedin_ads/source.py | 15 ++-- .../oauth/OAuthImplementationFactory.java | 1 - .../oauth/flows/LinkedinAdsOAuthFlow.java | 7 +- .../LinkedinAdsOAuthFlowIntegrationTest.java | 85 +++++++++++++++++++ .../oauth/flows/LinkedinAdsOAuthFlowTest.java | 75 ++-------------- 5 files changed, 106 insertions(+), 77 deletions(-) create mode 100644 airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java diff --git a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py 
b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py index d3d402204522f..839d7cbcc1ed1 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py +++ b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py @@ -9,6 +9,7 @@ import requests from airbyte_cdk import AirbyteLogger +from airbyte_cdk.models import SyncMode from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http import HttpStream @@ -333,24 +334,24 @@ def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> :: more info: https://docs.microsoft.com/linkedin/consumer/integrations/self-serve/sign-in-with-linkedin """ - header = self.get_authenticator(config).get_auth_header() - profile_url = "https://api.linkedin.com/v2/me" - + config["authenticator"] = self.get_authenticator(config) + stream = Accounts(config) + # need to load the first item only + stream.records_limit = 1 try: - response = requests.get(url=profile_url, headers=header) - response.raise_for_status() + next(stream.read_records(sync_mode=SyncMode.full_refresh), None) return True, None except requests.exceptions.RequestException as e: return False, f"{e}, {response.json().get('message')}" + except Exception as e: + return False, e def streams(self, config: Mapping[str, Any]) -> List[Stream]: """ Mapping a input config of the user input configuration as defined in the connector spec. Passing config to the streams. """ - config["authenticator"] = self.get_authenticator(config) - return [ Accounts(config), AccountUsers(config), diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java index 5815b46562791..03a9014359836 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java @@ -29,7 +29,6 @@ import io.airbyte.oauth.flows.google.GoogleSheetsOAuthFlow; import java.net.http.HttpClient; import java.util.Map; -import java.util.UUID; public class OAuthImplementationFactory { diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java index 4167100d28702..142f1d6b6e0d5 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlow.java @@ -6,7 +6,6 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.oauth.BaseOAuth2Flow; @@ -34,7 +33,11 @@ public LinkedinAdsOAuthFlow(ConfigRepository configRepository, final HttpClient } @Override - protected String formatConsentUrl(UUID definitionId, String clientId, String redirectUrl) throws IOException { + protected String formatConsentUrl(UUID definitionId, + String clientId, + String redirectUrl, + final JsonNode inputOAuthConfiguration) + throws IOException { try { return new URIBuilder(AUTHORIZE_URL) .addParameter("client_id", clientId) diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java 
b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java new file mode 100644 index 0000000000000..eb10a1d45bf23 --- /dev/null +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.oauth.flows; + +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.when; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.SourceOAuthParameter; +import io.airbyte.config.persistence.ConfigNotFoundException; +import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.oauth.OAuthFlowImplementation; +import io.airbyte.validation.json.JsonValidationException; +import java.io.IOException; +import java.net.http.HttpClient; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import org.junit.jupiter.api.Test; + +public class LinkedinAdsOAuthFlowIntegrationTest extends OAuthFlowIntegrationTest { + + protected static final Path CREDENTIALS_PATH = Path.of("secrets/config_oauth.json"); + protected static final String REDIRECT_URL = "http://localhost:3000/auth_flow"; + + @Override + protected int getServerListeningPort() { + return 3000; + } + + @Override + protected Path getCredentialsPath() { + return CREDENTIALS_PATH; + } + + @Override + protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { + return new LinkedinAdsOAuthFlow(configRepository, httpClient); + } + + @SuppressWarnings({"BusyWait", "unchecked"}) + @Test + public void testFullOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { + int limit = 20; + final UUID workspaceId = UUID.randomUUID(); + final UUID definitionId = UUID.randomUUID(); + final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); + when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() + .withOauthParameterId(UUID.randomUUID()) + .withSourceDefinitionId(definitionId) + .withWorkspaceId(workspaceId) + .withConfiguration(Jsons.jsonNode(Map.of("credentials", ImmutableMap.builder() + .put("client_id", credentialsJson.get("client_id").asText()) + .put("client_secret", credentialsJson.get("client_secret").asText()) + .build()))))); + final String url = + getFlowImplementation(configRepository, httpClient).getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); + LOGGER.info("Waiting for user consent at: {}", url); + // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing + // access... 
+ while (!serverHandler.isSucceeded() && limit > 0) { + Thread.sleep(1000); + limit -= 1; + } + assertTrue(serverHandler.isSucceeded(), "Failed to get User consent on time"); + final Map params = flow.completeSourceOAuth(workspaceId, definitionId, + Map.of("code", serverHandler.getParamValue()), REDIRECT_URL); + + LOGGER.info("Response from completing OAuth Flow is: {}", params.toString()); + assertTrue(params.containsKey("credentials")); + final Map credentials; + credentials = Collections.unmodifiableMap((Map) params.get("credentials")); + assertTrue(credentials.containsKey("refresh_token")); + assertTrue(credentials.get("refresh_token").toString().length() > 0); + } + +} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlowTest.java index 3dc44640c669b..0958c2dfb01aa 100644 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlowTest.java +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/LinkedinAdsOAuthFlowTest.java @@ -4,77 +4,18 @@ package io.airbyte.oauth.flows; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import io.airbyte.oauth.BaseOAuthFlow; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.net.http.HttpClient; -import java.net.http.HttpResponse; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; +public class LinkedinAdsOAuthFlowTest extends BaseOAuthFlowTest { -public class LinkedinAdsOAuthFlowTest { - - private UUID workspaceId; - private UUID definitionId; - private LinkedinAdsOAuthFlow linkedinAdsOAuthFlow; - private HttpClient httpClient; - - private static final String REDIRECT_URL = "https://airbyte.io"; - - private static String getConstantState() { - return "state"; - } - - @BeforeEach - public void setup() throws IOException, JsonValidationException { - workspaceId = UUID.randomUUID(); - definitionId = UUID.randomUUID(); - ConfigRepository configRepository = mock(ConfigRepository.class); - httpClient = mock(HttpClient.class); - when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() - .withOauthParameterId(UUID.randomUUID()) - .withSourceDefinitionId(definitionId) - .withWorkspaceId(workspaceId) - .withConfiguration(Jsons.jsonNode(Map.of("credentials", ImmutableMap.builder() - .put("client_id", "test_client_id") - .put("client_secret", "test_client_secret") - .build()))))); - linkedinAdsOAuthFlow = new LinkedinAdsOAuthFlow(configRepository, httpClient, LinkedinAdsOAuthFlowTest::getConstantState); - - } - - @Test - public void testGetSourceConcentUrl() throws IOException, ConfigNotFoundException { - final String concentUrl = - linkedinAdsOAuthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL); - assertEquals(concentUrl, - "https://www.linkedin.com/oauth/v2/authorization?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&scope=r_ads_reporting+r_ads+r_basicprofile&state=state"); + @Override + protected 
BaseOAuthFlow getOAuthFlow() { + return new LinkedinAdsOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); } - @Test - public void testCompleteSourceOAuth() throws IOException, InterruptedException, ConfigNotFoundException { - - Map returnedCredentials = Map.of("refresh_token", "refresh_token_response"); - final HttpResponse response = mock(HttpResponse.class); - when(response.body()).thenReturn(Jsons.serialize(returnedCredentials)); - when(httpClient.send(any(), any())).thenReturn(response); - final Map queryParams = Map.of("code", "test_code"); - final Map actualQueryParams = - linkedinAdsOAuthFlow.completeSourceOAuth(workspaceId, definitionId, queryParams, REDIRECT_URL); - - assertEquals(Jsons.serialize(Map.of("credentials", returnedCredentials)), Jsons.serialize(actualQueryParams)); + @Override + protected String getExpectedConsentUrl() { + return "https://www.linkedin.com/oauth/v2/authorization?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&scope=r_ads_reporting+r_emailaddress+r_liteprofile+r_ads+r_basicprofile+r_organization_social&state=state"; } } From e1cb2fc4acafa2c58b92f84c77f7c7634b241d7a Mon Sep 17 00:00:00 2001 From: antixar Date: Thu, 18 Nov 2021 15:44:37 +0200 Subject: [PATCH 34/36] update tests --- .../source-linkedin-ads/source_linkedin_ads/source.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py index 839d7cbcc1ed1..3c2ed1f0740e5 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py +++ b/airbyte-integrations/connectors/source-linkedin-ads/source_linkedin_ads/source.py @@ -341,8 +341,6 @@ def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> try: next(stream.read_records(sync_mode=SyncMode.full_refresh), None) return True, None - except requests.exceptions.RequestException as e: - return False, f"{e}, {response.json().get('message')}" except Exception as e: return False, e From c09d9ffe328f0f3897f7e005a695afbe21f09d7a Mon Sep 17 00:00:00 2001 From: antixar Date: Thu, 18 Nov 2021 16:28:44 +0200 Subject: [PATCH 35/36] update spec file --- airbyte-config/init/src/main/resources/seed/source_specs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index eefc469674fae..9638bfb89c45d 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -3551,7 +3551,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-mongodb-v2:0.1.3" +- dockerImage: "airbyte/source-mongodb-v2:0.1.4" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/mongodb-v2" changelogUrl: "https://docs.airbyte.io/integrations/sources/mongodb-v2" From afe1c1e97831b82a198eb9448215d6478ec0e593 Mon Sep 17 00:00:00 2001 From: antixar Date: Thu, 18 Nov 2021 16:33:40 +0200 Subject: [PATCH 36/36] update spec file --- tools/bin/ci_credentials.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh index 2740bce7c60ac..e6c25027741bf 100755 --- a/tools/bin/ci_credentials.sh +++ b/tools/bin/ci_credentials.sh @@ -236,6 +236,7 @@ read_secrets source-klaviyo 
"$KLAVIYO_TEST_CREDS" read_secrets source-lemlist "$SOURCE_LEMLIST_TEST_CREDS" read_secrets source-lever-hiring "$LEVER_HIRING_INTEGRATION_TEST_CREDS" read_secrets source-looker "$LOOKER_INTEGRATION_TEST_CREDS" +read_secrets source-linnworks "$SOURCE_LINNWORKS_TEST_CREDS" read_secrets source-mailchimp "$MAILCHIMP_TEST_CREDS" read_secrets source-marketo "$SOURCE_MARKETO_TEST_CREDS" read_secrets source-microsoft-teams "$MICROSOFT_TEAMS_TEST_CREDS"