From 3f14922ccd70933605d5e81b968b5ed16d8dc5ae Mon Sep 17 00:00:00 2001 From: asyarif93 Date: Wed, 8 Dec 2021 14:34:58 +0700 Subject: [PATCH 01/12] feat(zendesk): Add Brands and CustomRoles --- .../schemas/brands.json | 104 +++++++ .../schemas/custom_roles.json | 280 ++++++++++++++++++ .../source_zendesk_support/source.py | 4 + .../source_zendesk_support/streams.py | 6 + 4 files changed, 394 insertions(+) create mode 100644 airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/brands.json create mode 100644 airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/custom_roles.json diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/brands.json b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/brands.json new file mode 100644 index 000000000000..8d86f42465af --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/brands.json @@ -0,0 +1,104 @@ +{ + "type": [ + "null", + "object" + ], + "properties": { + "id": { + "type": [ + "null", + "integer" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "logo": { + "type": [ + "null", + "string" + ] + }, + "brand_url": { + "type": [ + "null", + "string" + ] + }, + "host_mapping": { + "type": [ + "null", + "string" + ] + }, + "subdomain": { + "type": [ + "null", + "string" + ] + }, + "url": { + "type": [ + "null", + "string" + ] + }, + "ticket_form_ids": { + "type": [ + "null", + "array" + ] + }, + "signature_template": { + "type": [ + "null", + "string" + ] + }, + "has_help_center": { + "type": [ + "null", + "boolean" + ] + }, + "help_center_state": { + "type": [ + "null", + "string" + ] + }, + "active": { + "type": [ + "null", + "boolean" + ] + }, + "default": { + "type": [ + "null", + "boolean" + ] + }, + "is_deleted": { + "type": [ + "null", + "boolean" + ] + }, + "created_at": { + "type": [ + "null", + "string" + ] + }, + "updated_at": { + "type": [ + "null", + "string" + ] + } + } +} diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/custom_roles.json b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/custom_roles.json new file mode 100644 index 000000000000..9d1bdc7d90e6 --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/custom_roles.json @@ -0,0 +1,280 @@ +{ + "type": [ + "null", + "object" + ], + "properties": { + "id": { + "type": [ + "null", + "integer" + ] + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "description": { + "type": [ + "null", + "string" + ] + }, + "role_type": { + "type": [ + "null", + "integer" + ] + }, + "team_member_count": { + "type": [ + "null", + "integer" + ] + }, + "configuration": { + "properties": { + "assign_tickets_to_any_group": { + "type": [ + "null", + "boolean" + ] + }, + "chat_access": { + "type": [ + "null", + "boolean" + ] + }, + "end_user_list_access": { + "type": [ + "null", + "string" + ] + }, + "end_user_profile_access": { + "type": [ + "null", + "string" + ] + }, + "explore_access": { + "type": [ + "null", + "string" + ] + }, + "forum_access": { + "type": [ + "null", + "string" + ] + }, + "forum_access_restricted_content": { + "type": [ + "null", + "boolean" + ] + }, + "group_access": { + "type": [ + "null", + "boolean" + ] + }, + "light_agent": { + "type": [ + "null", + "boolean" + ] + }, + "macro_access": { + "type": [ + "null", + "string" + ] 
+ }, + "manage_business_rules": { + "type": [ + "null", + "boolean" + ] + }, + "manage_contextual_workspaces": { + "type": [ + "null", + "boolean" + ] + }, + "manage_dynamic_content": { + "type": [ + "null", + "boolean" + ] + }, + "manage_extensions_and_channels": { + "type": [ + "null", + "boolean" + ] + }, + "manage_facebook": { + "type": [ + "null", + "boolean" + ] + }, + "manage_organization_fields": { + "type": [ + "null", + "boolean" + ] + }, + "manage_ticket_fields": { + "type": [ + "null", + "boolean" + ] + }, + "manage_ticket_forms": { + "type": [ + "null", + "boolean" + ] + }, + "manage_user_fields": { + "type": [ + "null", + "boolean" + ] + }, + "moderate_forums": { + "type": [ + "null", + "boolean" + ] + }, + "organization_editing": { + "type": [ + "null", + "boolean" + ] + }, + "organization_notes_editing": { + "type": [ + "null", + "boolean" + ] + }, + "report_access": { + "type": [ + "null", + "string" + ] + }, + "side_conversation_create": { + "type": [ + "null", + "boolean" + ] + }, + "ticket_access": { + "type": [ + "null", + "string" + ] + }, + "ticket_bulk_edit": { + "type": [ + "null", + "boolean" + ] + }, + "ticket_comment_access": { + "type": [ + "null", + "string" + ] + }, + "ticket_deletion": { + "type": [ + "null", + "boolean" + ] + }, + "ticket_editing": { + "type": [ + "null", + "boolean" + ] + }, + "ticket_merge": { + "type": [ + "null", + "boolean" + ] + }, + "ticket_tag_editing": { + "type": [ + "null", + "boolean" + ] + }, + "twitter_search_access": { + "type": [ + "null", + "boolean" + ] + }, + "user_view_access": { + "type": [ + "null", + "string" + ] + }, + "view_access": { + "type": [ + "null", + "string" + ] + }, + "view_deleted_tickets": { + "type": [ + "null", + "boolean" + ] + }, + "voice_access": { + "type": [ + "null", + "boolean" + ] + }, + "voice_dashboard_access": { + "type": [ + "null", + "boolean" + ] + } + }, + "type": [ + "null", + "object" + ] + }, + "created_at": { + "type": [ + "null", + "string" + ] + }, + "updated_at": { + "type": [ + "null", + "string" + ] + } + } +} diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py index 4d7cc0043517..b401a08cc919 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py @@ -11,6 +11,8 @@ from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator from .streams import ( + Brands, + CustomRoles, GroupMemberships, Groups, Macros, @@ -108,4 +110,6 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: TicketMetricEvents(**args), Tickets(**args), Users(**args), + Brands(**args), + CustomRoles(**args) ] diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py index 677bc7d974de..f9de5cda5097 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py @@ -591,3 +591,9 @@ class SlaPolicies(FullRefreshStream): def path(self, *args, **kwargs) -> str: return "slas/policies.json" + +class Brands(FullRefreshStream): + """Brands stream: https://developer.zendesk.com/api-reference/ticketing/account-configuration/brands/#list-brands""" + 
+class CustomRoles(FullRefreshStream): + """CustomRoles stream: https://developer.zendesk.com/api-reference/ticketing/account-configuration/custom_roles/#list-custom-roles""" From dc2990927b751ba9e10fc39888e60156dd4a78d0 Mon Sep 17 00:00:00 2001 From: asyarif93 Date: Wed, 8 Dec 2021 19:15:44 +0700 Subject: [PATCH 02/12] feat(zendesk): add incremental unsorted cursor stream implement IncrementalUnsortedCursorStream for ticket_metrics --- .../source_zendesk_support/streams.py | 31 +++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py index 677bc7d974de..e65c4edfb94d 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py @@ -323,6 +323,24 @@ def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> return params +class IncrementalUnsortedCursorStream(IncrementalUnsortedStream, ABC): + """Stream for loading without sorting but with cursor based pagination""" + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + has_more = response.json().get("meta").get("has_more") + if not has_more: + self._finished = True + return None + return response.json().get("meta").get("after_cursor") + + def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: + params = super().request_params(next_page_token=next_page_token, **kwargs) + params["page[size]"] = self.page_size + if next_page_token: + params["page[after]"] = next_page_token + return params + + class FullRefreshStream(IncrementalUnsortedPageStream, ABC): """ "Stream for endpoints where there are not any created_at or updated_at fields""" @@ -464,7 +482,8 @@ def parse_response( # 2) pagination and sorting mechanism # 3) cursor pagination and sorting mechanism # 4) without sorting but with pagination -# 5) without created_at/updated_at fields +# 5) without sorting but with cursor pagination +# 6) without created_at/updated_at fields # endpoints provide a built-in incremental approach @@ -546,9 +565,17 @@ class TicketForms(IncrementalUnsortedPageStream): """TicketForms stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_forms/""" -class TicketMetrics(IncrementalUnsortedPageStream): +class TicketMetrics(IncrementalUnsortedCursorStream): """TicketMetric stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_metrics/""" + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + # Tickets are ordered chronologically by created date, from newest to oldest. + # No need to get the next page once the cursor has passed the initial state + if self.is_finished: + return None + + return super().next_page_token(response) + class TicketMetricEvents(IncrementalExportStream): """TicketMetricEvents stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_metric_events/"""
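The IncrementalUnsortedCursorStream added in this patch follows Zendesk's cursor-based pagination: each response carries meta.has_more and meta.after_cursor, and the next request passes page[size] / page[after]. Below is a minimal standalone sketch of that request loop, not part of the patch — the subdomain, the ticket_metrics endpoint, and the pre-authenticated session are illustrative assumptions:

import requests

def fetch_all_ticket_metrics(session: requests.Session, subdomain: str, page_size: int = 100):
    # Illustrative endpoint; any cursor-paginated Zendesk endpoint pages the same way.
    url = f"https://{subdomain}.zendesk.com/api/v2/ticket_metrics.json"
    params = {"page[size]": page_size}
    while True:
        data = session.get(url, params=params).json()
        yield from data.get("ticket_metrics", [])
        meta = data.get("meta", {})
        if not meta.get("has_more"):
            break  # mirrors the stream marking itself finished
        params["page[after]"] = meta.get("after_cursor")

Note that the sketch reads meta defensively with .get("meta", {}); patch 05 below applies the same hardening to the stream class itself.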
From 31793f2dcb9c39459356a41a9b242fc7d01767f5 Mon Sep 17 00:00:00 2001 From: asyarif93 Date: Wed, 8 Dec 2021 19:17:38 +0700 Subject: [PATCH 03/12] feat(zendesk): use sorted cursor pagination for ticket comments and macros --- .../source_zendesk_support/streams.py | 39 +++++++++++-------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py index e65c4edfb94d..b7abf9942589 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py @@ -348,22 +348,15 @@ class FullRefreshStream(IncrementalUnsortedPageStream, ABC): cursor_field = SourceZendeskSupportStream.cursor_field -class IncrementalSortedCursorStream(IncrementalUnsortedStream, ABC): +class IncrementalSortedCursorStream(IncrementalUnsortedCursorStream, ABC): """Stream for loading sorting data with cursor based pagination""" - def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(next_page_token=next_page_token, **kwargs) - params.update({"sort_by": self.cursor_field, "sort_order": "desc", "limit": self.page_size}) - - if next_page_token: - params["cursor"] = next_page_token + def request_params(self, **kwargs) -> MutableMapping[str, Any]: + params = super().request_params(**kwargs) + if params: + params.update({"sort_by": self.cursor_field, "sort_order": "desc"}) return params - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - if self.is_finished: - return None - return response.json().get("before_cursor") - class IncrementalSortedPageStream(IncrementalUnsortedPageStream, ABC): """Stream for loading sorting data with normal pagination""" @@ -375,7 +368,7 @@ def request_params(self, **kwargs) -> MutableMapping[str, Any]: return params -class TicketComments(IncrementalSortedPageStream): +class TicketComments(IncrementalSortedCursorStream): """TicketComments stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_comments/ ZenDesk doesn't provide API for loading of all comments by one direct endpoints.
Thus at first we loads all updated tickets and after this tries to load all created/updated @@ -386,7 +379,7 @@ class TicketComments(IncrementalSortedPageStream): raise_on_http_errors = False response_list_name = "comments" - cursor_field = IncrementalSortedPageStream.created_at_field + cursor_field = IncrementalSortedCursorStream.created_at_field def __init__(self, **kwargs): super().__init__(**kwargs) @@ -583,14 +576,14 @@ class TicketMetricEvents(IncrementalExportStream): cursor_field = "time" -class Macros(IncrementalSortedPageStream): +class Macros(IncrementalSortedCursorStream): """Macros stream: https://developer.zendesk.com/api-reference/ticketing/business-rules/macros/""" # endpoints provide a cursor pagination and sorting mechanism -class TicketAudits(IncrementalSortedCursorStream): +class TicketAudits(IncrementalUnsortedStream): """TicketAudits stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_audits/""" # can request a maximum of 1,000 results @@ -601,6 +594,20 @@ class TicketAudits(IncrementalSortedCursorStream): # Root of response is 'audits'. As rule as an endpoint name is equal a response list name response_list_name = "audits" + # This endpoint uses a variant of cursor pagination with some differences from cursor pagination used in other endpoints. + def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: + params = super().request_params(next_page_token=next_page_token, **kwargs) + params.update({"sort_by": self.cursor_field, "sort_order": "desc", "limit": self.page_size}) + + if next_page_token: + params["cursor"] = next_page_token + return params + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + if self.is_finished: + return None + return response.json().get("before_cursor") + # endpoints don't provide the updated_at/created_at fields # thus we can't implement an incremental logic for them From db82eea35a2c3580a29646206bc2c9fa918e17e8 Mon Sep 17 00:00:00 2001 From: asyarif93 Date: Wed, 8 Dec 2021 19:18:36 +0700 Subject: [PATCH 04/12] feat(zendesk): use unsorted cursor stream for groups, group memberships and satisfaction ratings --- .../source_zendesk_support/streams.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py index b7abf9942589..67d567673949 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py @@ -518,15 +518,15 @@ def get_last_end_time(self) -> Optional[Union[str, int]]: # endpoints provide a pagination mechanism but we can't manage a response order -class Groups(IncrementalUnsortedPageStream): +class Groups(IncrementalUnsortedCursorStream): """Groups stream: https://developer.zendesk.com/api-reference/ticketing/groups/groups/""" -class GroupMemberships(IncrementalUnsortedPageStream): +class GroupMemberships(IncrementalUnsortedCursorStream): """GroupMemberships stream: https://developer.zendesk.com/api-reference/ticketing/groups/group_memberships/""" -class SatisfactionRatings(IncrementalUnsortedPageStream): +class SatisfactionRatings(IncrementalUnsortedCursorStream): """SatisfactionRatings stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/satisfaction_ratings/ The 
ZenDesk API for this stream provides the filter "start_time" that can be used for incremental logic From 47697b0b60aafa2d15e300b418657f953c05c205 Mon Sep 17 00:00:00 2001 From: asyarif93 Date: Wed, 8 Dec 2021 19:32:17 +0700 Subject: [PATCH 05/12] fix(zendesk): use safe method to get value from nested dict --- .../source-zendesk-support/source_zendesk_support/streams.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py index 67d567673949..76e4e85a1c66 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py @@ -327,11 +327,11 @@ class IncrementalUnsortedCursorStream(IncrementalUnsortedStream, ABC): """Stream for loading without sorting but with cursor based pagination""" def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - has_more = response.json().get("meta").get("has_more") + has_more = response.json().get("meta", {}).get("has_more") if not has_more: self._finished = True return None - return response.json().get("meta").get("after_cursor") + return response.json().get("meta", {}).get("after_cursor") def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: params = super().request_params(next_page_token=next_page_token, **kwargs) From dcdacaf4ab3fe65edadeca29dbb440647f8a1017 Mon Sep 17 00:00:00 2001 From: asyarif93 Date: Thu, 9 Dec 2021 09:33:44 +0700 Subject: [PATCH 06/12] style(zendesk): reformat using gradlew --- .../schemas/brands.json | 151 +++---- .../schemas/custom_roles.json | 414 ++++++------------ .../source_zendesk_support/source.py | 2 +- .../source_zendesk_support/streams.py | 2 + 4 files changed, 191 insertions(+), 378 deletions(-) diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/brands.json b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/brands.json index 8d86f42465af..93becf2a3514 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/brands.json +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/brands.json @@ -1,104 +1,53 @@ { - "type": [ - "null", - "object" - ], - "properties": { - "id": { - "type": [ - "null", - "integer" - ] - }, - "name": { - "type": [ - "null", - "string" - ] - }, - "logo": { - "type": [ - "null", - "string" - ] - }, - "brand_url": { - "type": [ - "null", - "string" - ] - }, - "host_mapping": { - "type": [ - "null", - "string" - ] - }, - "subdomain": { - "type": [ - "null", - "string" - ] - }, - "url": { - "type": [ - "null", - "string" - ] - }, - "ticket_form_ids": { - "type": [ - "null", - "array" - ] - }, - "signature_template": { - "type": [ - "null", - "string" - ] - }, - "has_help_center": { - "type": [ - "null", - "boolean" - ] - }, - "help_center_state": { - "type": [ - "null", - "string" - ] - }, - "active": { - "type": [ - "null", - "boolean" - ] - }, - "default": { - "type": [ - "null", - "boolean" - ] - }, - "is_deleted": { - "type": [ - "null", - "boolean" - ] - }, - "created_at": { - "type": [ - "null", - "string" - ] - }, - "updated_at": { - "type": [ - "null", - "string" - ] - } + "type": ["null", "object"], + "properties": { + "id": { + 
"type": ["null", "integer"] + }, + "name": { + "type": ["null", "string"] + }, + "logo": { + "type": ["null", "string"] + }, + "brand_url": { + "type": ["null", "string"] + }, + "host_mapping": { + "type": ["null", "string"] + }, + "subdomain": { + "type": ["null", "string"] + }, + "url": { + "type": ["null", "string"] + }, + "ticket_form_ids": { + "type": ["null", "array"] + }, + "signature_template": { + "type": ["null", "string"] + }, + "has_help_center": { + "type": ["null", "boolean"] + }, + "help_center_state": { + "type": ["null", "string"] + }, + "active": { + "type": ["null", "boolean"] + }, + "default": { + "type": ["null", "boolean"] + }, + "is_deleted": { + "type": ["null", "boolean"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] } + } } diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/custom_roles.json b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/custom_roles.json index 9d1bdc7d90e6..b366721718cb 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/custom_roles.json +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/custom_roles.json @@ -1,280 +1,142 @@ { - "type": [ - "null", - "object" - ], - "properties": { - "id": { - "type": [ - "null", - "integer" - ] - }, - "name": { - "type": [ - "null", - "string" - ] - }, - "description": { - "type": [ - "null", - "string" - ] - }, - "role_type": { - "type": [ - "null", - "integer" - ] - }, - "team_member_count": { - "type": [ - "null", - "integer" - ] - }, - "configuration": { - "properties": { - "assign_tickets_to_any_group": { - "type": [ - "null", - "boolean" - ] - }, - "chat_access": { - "type": [ - "null", - "boolean" - ] - }, - "end_user_list_access": { - "type": [ - "null", - "string" - ] - }, - "end_user_profile_access": { - "type": [ - "null", - "string" - ] - }, - "explore_access": { - "type": [ - "null", - "string" - ] - }, - "forum_access": { - "type": [ - "null", - "string" - ] - }, - "forum_access_restricted_content": { - "type": [ - "null", - "boolean" - ] - }, - "group_access": { - "type": [ - "null", - "boolean" - ] - }, - "light_agent": { - "type": [ - "null", - "boolean" - ] - }, - "macro_access": { - "type": [ - "null", - "string" - ] - }, - "manage_business_rules": { - "type": [ - "null", - "boolean" - ] - }, - "manage_contextual_workspaces": { - "type": [ - "null", - "boolean" - ] - }, - "manage_dynamic_content": { - "type": [ - "null", - "boolean" - ] - }, - "manage_extensions_and_channels": { - "type": [ - "null", - "boolean" - ] - }, - "manage_facebook": { - "type": [ - "null", - "boolean" - ] - }, - "manage_organization_fields": { - "type": [ - "null", - "boolean" - ] - }, - "manage_ticket_fields": { - "type": [ - "null", - "boolean" - ] - }, - "manage_ticket_forms": { - "type": [ - "null", - "boolean" - ] - }, - "manage_user_fields": { - "type": [ - "null", - "boolean" - ] - }, - "moderate_forums": { - "type": [ - "null", - "boolean" - ] - }, - "organization_editing": { - "type": [ - "null", - "boolean" - ] - }, - "organization_notes_editing": { - "type": [ - "null", - "boolean" - ] - }, - "report_access": { - "type": [ - "null", - "string" - ] - }, - "side_conversation_create": { - "type": [ - "null", - "boolean" - ] - }, - "ticket_access": { - "type": [ - "null", - "string" - ] - }, - "ticket_bulk_edit": { - "type": [ - "null", - "boolean" - ] - }, - 
"ticket_comment_access": { - "type": [ - "null", - "string" - ] - }, - "ticket_deletion": { - "type": [ - "null", - "boolean" - ] - }, - "ticket_editing": { - "type": [ - "null", - "boolean" - ] - }, - "ticket_merge": { - "type": [ - "null", - "boolean" - ] - }, - "ticket_tag_editing": { - "type": [ - "null", - "boolean" - ] - }, - "twitter_search_access": { - "type": [ - "null", - "boolean" - ] - }, - "user_view_access": { - "type": [ - "null", - "string" - ] - }, - "view_access": { - "type": [ - "null", - "string" - ] - }, - "view_deleted_tickets": { - "type": [ - "null", - "boolean" - ] - }, - "voice_access": { - "type": [ - "null", - "boolean" - ] - }, - "voice_dashboard_access": { - "type": [ - "null", - "boolean" - ] - } - }, - "type": [ - "null", - "object" - ] - }, - "created_at": { - "type": [ - "null", - "string" - ] - }, - "updated_at": { - "type": [ - "null", - "string" - ] + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "integer"] + }, + "name": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "role_type": { + "type": ["null", "integer"] + }, + "team_member_count": { + "type": ["null", "integer"] + }, + "configuration": { + "properties": { + "assign_tickets_to_any_group": { + "type": ["null", "boolean"] + }, + "chat_access": { + "type": ["null", "boolean"] + }, + "end_user_list_access": { + "type": ["null", "string"] + }, + "end_user_profile_access": { + "type": ["null", "string"] + }, + "explore_access": { + "type": ["null", "string"] + }, + "forum_access": { + "type": ["null", "string"] + }, + "forum_access_restricted_content": { + "type": ["null", "boolean"] + }, + "group_access": { + "type": ["null", "boolean"] + }, + "light_agent": { + "type": ["null", "boolean"] + }, + "macro_access": { + "type": ["null", "string"] + }, + "manage_business_rules": { + "type": ["null", "boolean"] + }, + "manage_contextual_workspaces": { + "type": ["null", "boolean"] + }, + "manage_dynamic_content": { + "type": ["null", "boolean"] + }, + "manage_extensions_and_channels": { + "type": ["null", "boolean"] + }, + "manage_facebook": { + "type": ["null", "boolean"] + }, + "manage_organization_fields": { + "type": ["null", "boolean"] + }, + "manage_ticket_fields": { + "type": ["null", "boolean"] + }, + "manage_ticket_forms": { + "type": ["null", "boolean"] + }, + "manage_user_fields": { + "type": ["null", "boolean"] + }, + "moderate_forums": { + "type": ["null", "boolean"] + }, + "organization_editing": { + "type": ["null", "boolean"] + }, + "organization_notes_editing": { + "type": ["null", "boolean"] + }, + "report_access": { + "type": ["null", "string"] + }, + "side_conversation_create": { + "type": ["null", "boolean"] + }, + "ticket_access": { + "type": ["null", "string"] + }, + "ticket_bulk_edit": { + "type": ["null", "boolean"] + }, + "ticket_comment_access": { + "type": ["null", "string"] + }, + "ticket_deletion": { + "type": ["null", "boolean"] + }, + "ticket_editing": { + "type": ["null", "boolean"] + }, + "ticket_merge": { + "type": ["null", "boolean"] + }, + "ticket_tag_editing": { + "type": ["null", "boolean"] + }, + "twitter_search_access": { + "type": ["null", "boolean"] + }, + "user_view_access": { + "type": ["null", "string"] + }, + "view_access": { + "type": ["null", "string"] + }, + "view_deleted_tickets": { + "type": ["null", "boolean"] + }, + "voice_access": { + "type": ["null", "boolean"] + }, + "voice_dashboard_access": { + "type": ["null", "boolean"] } + }, + "type": ["null", "object"] + }, + 
"created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] } + } } diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py index b401a08cc919..b04415a20d5a 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py @@ -111,5 +111,5 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: Tickets(**args), Users(**args), Brands(**args), - CustomRoles(**args) + CustomRoles(**args), ] diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py index f9de5cda5097..17438ed11628 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py @@ -592,8 +592,10 @@ class SlaPolicies(FullRefreshStream): def path(self, *args, **kwargs) -> str: return "slas/policies.json" + class Brands(FullRefreshStream): """Brands stream: https://developer.zendesk.com/api-reference/ticketing/account-configuration/brands/#list-brands""" + class CustomRoles(FullRefreshStream): """CustomRoles stream: https://developer.zendesk.com/api-reference/ticketing/account-configuration/custom_roles/#list-custom-roles""" From 57301efdcd6535f0c628f8d5e9687deacb89c462 Mon Sep 17 00:00:00 2001 From: asyarif93 Date: Mon, 13 Dec 2021 14:06:22 +0700 Subject: [PATCH 07/12] fix(zendesk): format created_at and updated_at to date-time format --- .../source_zendesk_support/schemas/brands.json | 6 ++++-- .../source_zendesk_support/schemas/custom_roles.json | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/brands.json b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/brands.json index 93becf2a3514..fe8da6460d69 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/brands.json +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/brands.json @@ -44,10 +44,12 @@ "type": ["null", "boolean"] }, "created_at": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" }, "updated_at": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" } } } diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/custom_roles.json b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/custom_roles.json index b366721718cb..df6a9b40fb64 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/custom_roles.json +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/custom_roles.json @@ -133,10 +133,12 @@ "type": ["null", "object"] }, "created_at": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" }, "updated_at": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" } } } From 29a14d6badb50c8beda7ea24721707bf491d6bf1 Mon Sep 17 00:00:00 2001 From: asyarif93 Date: Mon, 13 Dec 2021 14:07:45 +0700 
Subject: [PATCH 08/12] feat(zendesk): add business hours schedule --- .../schemas/schedules.json | 36 +++++++++++++++++++ .../source_zendesk_support/source.py | 2 ++ .../source_zendesk_support/streams.py | 7 ++++ 3 files changed, 45 insertions(+) create mode 100644 airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/schedules.json diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/schedules.json b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/schedules.json new file mode 100644 index 000000000000..f0a0526590f7 --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/schedules.json @@ -0,0 +1,36 @@ +{ + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "integer"] + }, + "name": { + "type": ["null", "string"] + }, + "intervals": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "start_time": { + "type": ["null", "integer"] + }, + "end_time": { + "type": ["null", "integer"] + } + } + } + }, + "time_zone": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py index b04415a20d5a..06b635801f65 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py @@ -18,6 +18,7 @@ Macros, Organizations, SatisfactionRatings, + Schedules, SlaPolicies, SourceZendeskException, Tags, @@ -112,4 +113,5 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: Users(**args), Brands(**args), CustomRoles(**args), + Schedules(**args), ] diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py index 17438ed11628..ba73c653c723 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py @@ -599,3 +599,10 @@ class Brands(FullRefreshStream): class CustomRoles(FullRefreshStream): """CustomRoles stream: https://developer.zendesk.com/api-reference/ticketing/account-configuration/custom_roles/#list-custom-roles""" + + +class Schedules(FullRefreshStream): + """Schedules stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/schedules/#list-schedules""" + + def path(self, *args, **kwargs) -> str: + return "business_hours/schedules.json" From 8b64245037e8c8bfcb884b3dab24ad027b7b7c1a Mon Sep 17 00:00:00 2001 From: Marcos Marx Date: Fri, 17 Dec 2021 17:25:31 -0300 Subject: [PATCH 09/12] bump connector version --- .../79c1aa37-dae3-42ae-b333-d1c105477715.json | 2 +- .../init/src/main/resources/seed/source_definitions.yaml | 2 +- airbyte-config/init/src/main/resources/seed/source_specs.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json 
b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json index cbe439b98dd8..d87c3137c48a 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "79c1aa37-dae3-42ae-b333-d1c105477715", "name": "Zendesk Support", "dockerRepository": "airbyte/source-zendesk-support", - "dockerImageTag": "0.1.8", + "dockerImageTag": "0.1.10", "documentationUrl": "https://docs.airbyte.io/integrations/sources/zendesk-support", "icon": "zendesk.svg" } diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index f8a5e71c10e8..f06bf54d75b4 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -721,7 +721,7 @@ - name: Zendesk Support sourceDefinitionId: 79c1aa37-dae3-42ae-b333-d1c105477715 dockerRepository: airbyte/source-zendesk-support - dockerImageTag: 0.1.8 + dockerImageTag: 0.1.10 documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-support icon: zendesk.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index f29c9587ffb0..3ef87ad51e45 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -6755,7 +6755,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-zendesk-support:0.1.8" +- dockerImage: "airbyte/source-zendesk-support:0.1.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/zendesk-support" connectionSpecification: From 147b96949829b6da7b8f0e769d68c0dbb7adb817 Mon Sep 17 00:00:00 2001 From: Marcos Marx Date: Fri, 17 Dec 2021 17:26:23 -0300 Subject: [PATCH 10/12] bump dockerfile version --- .../connectors/source-zendesk-support/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile index 773faccbde1d..c0a06babc403 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile +++ b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile @@ -25,5 +25,5 @@ COPY source_zendesk_support ./source_zendesk_support ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.8 +LABEL io.airbyte.version=0.1.10 LABEL io.airbyte.name=airbyte/source-zendesk-support From 6810f7d313184c7b11ca76a1fb124b56d456556e Mon Sep 17 00:00:00 2001 From: Marcos Marx Date: Fri, 17 Dec 2021 19:08:58 -0300 Subject: [PATCH 11/12] reset --- .bumpversion.cfg | 2 +- .env | 2 +- .github/workflows/gradle.yml | 15 +- .github/workflows/release-airbyte-os.yml | 8 +- .../workflows/test-performance-command.yml | 4 + airbyte-bootloader/Dockerfile | 4 +- airbyte-cdk/python/CHANGELOG.md | 3 + .../sources/streams/http/rate_limiting.py | 4 + airbyte-cdk/python/setup.py | 2 +- .../424892c4-daac-4491-b35d-c6688ba547ba.json | 2 +- .../200330b2-ea62-4d11-ac6d-cfe3e3f8ab2b.json | 2 +- 
.../36c891d9-4bd9-43ac-bad2-10e12756272c.json | 2 +- .../445831eb-78db-4b1f-8f1f-0d96ad8739e2.json | 2 +- .../47f25999-dd5e-4636-8c39-e7cea2453331.json | 2 +- .../79c1aa37-dae3-42ae-b333-d1c105477715.json | 2 +- .../7a4327c4-315a-11ec-8d3d-0242ac130003.json | 2 +- .../80a54ea2-9959-4040-aac1-eee42423ec9b.json | 4 +- .../9da77001-af33-4bcd-be46-6252bf9342b9.json | 2 +- .../bad83517-5e54-4a3d-9b53-63e85fbd4d7c.json | 2 +- .../d8540a80-6120-485d-b7d6-272bca477d9b.json | 8 + .../d913b0f2-cc51-4e55-a44c-8ba1697b9239.json | 2 +- .../e55879a8-0ef8-4557-abcf-ab34c53ec460.json | 2 +- .../e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json | 2 +- .../fe2b4084-3386-4d3b-9ad6-308f61a6f1e6.json | 2 +- .../seed/destination_definitions.yaml | 10 +- .../resources/seed/destination_specs.yaml | 52 +- .../resources/seed/source_definitions.yaml | 52 +- .../src/main/resources/seed/source_specs.yaml | 499 +++++-- .../split_secrets/JsonSecretsProcessor.java | 6 +- .../JsonSecretsProcessorTest.java | 92 +- .../cypress/integration/destination.spec.js | 2 +- .../cypress/integration/onboarding.spec.js | 6 +- .../cypress/integration/source.spec.js | 2 +- .../cypress/support/commands/common.js | 4 +- .../BufferedStreamConsumer.java | 6 +- .../base-standard-source-test-file/Dockerfile | 4 +- .../bases/source-acceptance-test/CHANGELOG.md | 3 + .../bases/source-acceptance-test/Dockerfile | 2 +- .../source_acceptance_test/tests/test_core.py | 11 + .../utils/connector_runner.py | 33 +- .../unit_tests/test_core.py | 121 ++ .../unit_tests/test_utils.py | 85 ++ .../bases/standard-source-test/Dockerfile | 4 +- .../source/AbstractSourceConnectorTest.java | 4 +- airbyte-integrations/builds.md | 1 + .../destination-java/Dockerfile.hbs | 4 +- .../source-java-jdbc/Dockerfile | 4 +- .../destination-azure-blob-storage/Dockerfile | 4 +- .../Dockerfile | 6 +- .../src/main/resources/spec.json | 14 +- .../destination-bigquery/Dockerfile | 6 +- .../src/main/resources/spec.json | 14 +- .../destination-cassandra/Dockerfile | 4 +- .../connectors/destination-csv/Dockerfile | 4 +- .../destination-databricks/Dockerfile | 4 +- .../databricks/DatabricksStreamCopier.java | 6 +- .../destination-dynamodb/Dockerfile | 4 +- .../destination-e2e-test/Dockerfile | 6 +- .../destination-elasticsearch/Dockerfile | 4 +- .../connectors/destination-gcs/Dockerfile | 4 +- .../destination/gcs/avro/GcsAvroWriter.java | 8 +- .../destination/gcs/csv/GcsCsvWriter.java | 8 +- .../destination/gcs/jsonl/GcsJsonlWriter.java | 8 +- .../gcs/parquet/GcsParquetWriter.java | 10 +- .../gcs/avro/GcsAvroWriterTest.java | 46 + .../connectors/destination-jdbc/Dockerfile | 4 +- .../connectors/destination-jdbc/build.gradle | 1 + .../jdbc/copy/CopyConsumerFactory.java | 2 +- .../destination/jdbc/copy/StreamCopier.java | 3 +- .../jdbc/copy/s3/LegacyS3StreamCopier.java | 235 ++++ .../copy/s3/LegacyS3StreamCopierFactory.java | 61 + .../jdbc/copy/s3/S3CopyConfig.java | 25 + .../jdbc/copy/s3/S3StreamCopier.java | 171 +-- .../jdbc/copy/s3/S3StreamCopierFactory.java | 28 +- .../copy/s3/LegacyS3StreamCopierTest.java | 280 ++++ .../jdbc/copy/s3/S3CopyConfigTest.java | 37 + .../jdbc/copy/s3/S3StreamCopierTest.java | 282 ++++ .../connectors/destination-kafka/Dockerfile | 4 +- .../connectors/destination-keen/Dockerfile | 4 +- .../connectors/destination-kinesis/Dockerfile | 4 +- .../destination-local-json/Dockerfile | 4 +- .../Dockerfile | 4 +- ...bColumnstoreDestinationAcceptanceTest.java | 2 +- ...bColumnstoreDestinationAcceptanceTest.java | 2 +- .../destination-meilisearch/Dockerfile | 4 +- 
.../Dockerfile | 4 +- .../connectors/destination-mongodb/Dockerfile | 4 +- .../connectors/destination-mqtt/Dockerfile | 4 +- .../Dockerfile | 4 +- .../connectors/destination-mssql/Dockerfile | 4 +- .../Dockerfile | 4 +- .../connectors/destination-mysql/Dockerfile | 4 +- .../Dockerfile | 4 +- .../connectors/destination-oracle/Dockerfile | 4 +- .../Dockerfile | 4 +- .../destination-postgres/Dockerfile | 4 +- .../connectors/destination-pubsub/Dockerfile | 4 +- .../connectors/destination-pulsar/Dockerfile | 4 +- .../connectors/destination-redis/Dockerfile | 4 +- .../destination-redshift/Dockerfile | 6 +- .../destination-redshift/build.gradle | 1 + .../redshift/RedshiftCopyS3Destination.java | 3 +- .../redshift/RedshiftStreamCopier.java | 67 +- .../redshift/RedshiftStreamCopierFactory.java | 17 +- .../src/main/resources/spec.json | 12 + .../redshift/RedshiftStreamCopierTest.java | 159 +++ .../rockset/RocksetDestination.java | 112 +- .../destination/rockset/RocksetUtils.java | 422 +++--- .../rockset/RocksetWriteApiConsumer.java | 307 ++-- .../RocksetDestinationAcceptanceTest.java | 266 ++-- .../connectors/destination-s3/Dockerfile | 6 +- .../connectors/destination-s3/build.gradle | 1 + .../destination/s3/S3DestinationConfig.java | 41 +- .../s3/avro/JsonToAvroSchemaConverter.java | 2 +- .../destination/s3/avro/S3AvroWriter.java | 11 +- .../destination/s3/csv/S3CsvFormatConfig.java | 29 +- .../destination/s3/csv/S3CsvWriter.java | 105 +- .../csv/StagingDatabaseCsvSheetGenerator.java | 44 + .../destination/s3/jsonl/S3JsonlWriter.java | 11 +- .../s3/parquet/S3ParquetWriter.java | 11 +- .../destination/s3/writer/BaseS3Writer.java | 20 +- .../s3/writer/ProductionWriterFactory.java | 2 +- .../destination/s3/writer/S3Writer.java | 7 + .../destination/s3/csv/S3CsvWriterTest.java | 326 +++++ .../connectors/destination-scylla/Dockerfile | 4 +- .../destination-snowflake/Dockerfile | 6 +- .../destination-snowflake/build.gradle | 2 +- .../snowflake/SnowflakeDatabase.java | 2 + .../SnowflakeInternalStagingDestination.java | 33 + .../snowflake/SnowflakeS3StreamCopier.java | 11 +- .../SnowflakeS3StreamCopierFactory.java | 4 +- .../source-amazon-seller-partner/Dockerfile | 2 +- .../integration_tests/configured_catalog.json | 9 + .../integration_tests/sample_state.json | 3 + ...T_BRAND_ANALYTICS_SEARCH_TERMS_REPORT.json | 29 + .../source_amazon_seller_partner/source.py | 8 + .../source_amazon_seller_partner/spec.json | 9 + .../source_amazon_seller_partner/streams.py | 76 +- .../test_repots_streams_rate_limits.py | 1 + .../connectors/source-bigquery/Dockerfile | 4 +- .../connectors/source-bing-ads/Dockerfile | 2 +- .../source-bing-ads/source_bing_ads/spec.json | 24 +- .../Dockerfile | 4 +- .../connectors/source-clickhouse/Dockerfile | 6 +- .../src/main/resources/spec.json | 15 +- .../Dockerfile | 4 +- .../connectors/source-cockroachdb/Dockerfile | 4 +- .../source-db2-strict-encrypt/Dockerfile | 4 +- .../connectors/source-db2/Dockerfile | 4 +- .../connectors/source-drift/Dockerfile | 2 +- .../source-drift/source_drift/spec.json | 2 +- .../connectors/source-e2e-test/Dockerfile | 6 +- .../source/e2e_test/InfiniteFeedSource.java | 15 +- .../src/main/resources/spec.json | 4 +- .../source-facebook-marketing/Dockerfile | 2 +- .../integration_tests/spec.json | 10 +- .../source_facebook_marketing/api.py | 12 +- .../schemas/ad_creatives.json | 3 + .../ads_insights_action_breakdowns.json | 14 + .../schemas/ads_insights_breakdowns.json | 10 - .../schemas/campaigns.json | 105 +- .../source_facebook_marketing/source.py | 23 +- 
.../source_facebook_marketing/streams.py | 60 +- .../connectors/source-file-secure/setup.py | 2 +- .../source-google-analytics-v4/Dockerfile | 2 +- .../acceptance-test-config.yml | 2 + .../integration_tests/expected_records.txt | 6 + .../source_google_analytics_v4/source.py | 53 +- .../connectors/source-harvest/Dockerfile | 2 +- .../source-harvest/source_harvest/spec.json | 14 +- .../connectors/source-hubspot/Dockerfile | 2 +- .../source-hubspot/acceptance-test-config.yml | 16 +- .../integration_tests/abnormal_state.json | 28 +- .../sample_files/configured_catalog.json | 72 +- .../sample_files/sample_state.json | 28 +- .../source-hubspot/source_hubspot/api.py | 26 +- .../source-hubspot/source_hubspot/client.py | 14 +- .../source_hubspot/schemas/deals.json | 6 +- .../source-hubspot/source_hubspot/spec.json | 18 +- .../connectors/source-intercom/Dockerfile | 2 +- .../source-intercom/source_intercom/spec.json | 5 +- .../connectors/source-jdbc/Dockerfile | 4 +- .../connectors/source-jdbc/build.gradle | 2 +- .../connectors/source-kafka/Dockerfile | 4 +- .../connectors/source-linnworks/Dockerfile | 2 +- .../integration_tests/abnormal_state.json | 5 +- .../integration_tests/catalog.json | 921 +++++++++++- .../integration_tests/configured_catalog.json | 925 +++++++++++- .../integration_tests/sample_state.json | 5 +- .../schemas/processed_order_details.json | 8 +- .../schemas/processed_orders.json | 36 +- .../source_linnworks/schemas/stock_items.json | 12 +- .../schemas/stock_locations.json | 2 +- .../source_linnworks/spec.json | 2 +- .../source_linnworks/streams.py | 17 +- .../unit_tests/test_incremental_streams.py | 47 +- .../unit_tests/test_streams.py | 38 +- .../source-microsoft-teams/Dockerfile | 2 +- .../source_microsoft_teams/spec.json | 19 +- .../connectors/source-monday/Dockerfile | 2 +- .../source-monday/source_monday/spec.json | 5 +- .../source-mongodb-strict-encrypt/Dockerfile | 4 +- .../connectors/source-mongodb-v2/Dockerfile | 4 +- .../source-mssql-strict-encrypt/Dockerfile | 4 +- .../connectors/source-mssql/Dockerfile | 4 +- .../connectors/source-mssql/README.md | 11 + .../source-mysql-strict-encrypt/Dockerfile | 4 +- .../connectors/source-mysql/Dockerfile | 4 +- .../connectors/source-mysql/README.md | 9 + .../source-openweather/.dockerignore | 7 + .../connectors/source-openweather/Dockerfile | 38 + .../connectors/source-openweather/README.md | 132 ++ .../acceptance-test-config.yml | 18 + .../acceptance-test-docker.sh | 16 + .../source-openweather/bootstrap.md | 16 + .../source-openweather/build.gradle | 13 + .../integration_tests/__init__.py | 3 + .../integration_tests/abnormal_state.json | 5 + .../integration_tests/acceptance.py | 14 + .../integration_tests/catalog.json | 12 + .../integration_tests/configured_catalog.json | 13 + .../integration_tests/invalid_config.json | 5 + .../integration_tests/sample_config.json | 5 + .../integration_tests/sample_state.json | 7 + .../connectors/source-openweather/main.py | 13 + .../source-openweather/requirements.txt | 2 + .../connectors/source-openweather/setup.py | 29 + .../source_openweather/__init__.py | 8 + .../source_openweather/extra_validations.py | 33 + .../source_openweather/schemas/one_call.json | 85 ++ .../source_openweather/source.py | 46 + .../source_openweather/spec.json | 96 ++ .../source_openweather/streams.py | 52 + .../source-openweather/unit_tests/__init__.py | 3 + .../unit_tests/test_extra_validations.py | 64 + .../unit_tests/test_source.py | 58 + .../unit_tests/test_streams.py | 68 + 
.../source-oracle-strict-encrypt/Dockerfile | 4 +- .../source-paypal-transaction/Dockerfile | 2 +- .../source-paypal-transaction/README.md | 3 +- .../source_paypal_transaction/source.py | 54 +- .../source-postgres-strict-encrypt/Dockerfile | 4 +- .../connectors/source-postgres/Dockerfile | 4 +- .../connectors/source-postgres/README.md | 10 + .../connectors/source-redshift/Dockerfile | 4 +- .../source-relational-db/Dockerfile | 4 +- .../source-scaffold-java-jdbc/Dockerfile | 4 +- .../connectors/source-shopify/Dockerfile | 2 +- .../integration_tests/abnormal_state.json | 4 +- .../integration_tests/configured_catalog.json | 4 +- .../integration_tests/state.json | 2 +- .../schemas/abandoned_checkouts.json | 342 +---- .../source_shopify/schemas/draft_orders.json | 1239 ++++++----------- .../source_shopify/schemas/metafields.json | 2 +- ...orders_refunds.json => order_refunds.json} | 73 + .../{orders_risks.json => order_risks.json} | 0 .../source_shopify/schemas/orders.json | 52 +- .../source_shopify/schemas/shop.json | 2 +- .../source-shopify/source_shopify/source.py | 75 +- .../source-shopify/source_shopify/spec.json | 4 +- .../source_shopify/transform.py | 6 + .../source-snapchat-marketing/Dockerfile | 2 +- .../source_snapchat_marketing/spec.json | 10 +- .../connectors/source-snowflake/Dockerfile | 4 +- .../connectors/source-strava/Dockerfile | 2 +- .../source-strava/source_strava/spec.json | 49 + .../source-zendesk-support/Dockerfile | 2 +- .../source_zendesk_support/streams.py | 76 +- .../oauth/OAuthImplementationFactory.java | 2 + .../airbyte/oauth/flows/StravaOAuthFlow.java | 89 ++ .../oauth/flows/StravaOAuthFlowTest.java | 21 + airbyte-scheduler/app/Dockerfile | 4 +- airbyte-server/Dockerfile | 4 +- .../server/handlers/DestinationHandler.java | 2 +- .../server/handlers/SourceHandler.java | 5 +- .../test/acceptance/AcceptanceTests.java | 95 +- airbyte-webapp/.storybook/withProvider.tsx | 30 +- airbyte-webapp/package-lock.json | 519 +++---- airbyte-webapp/package.json | 14 +- airbyte-webapp/src/App.tsx | 73 +- .../ApiErrorBoundary/ApiErrorBoundary.tsx | 2 +- .../CreateConnectionContent.tsx | 3 +- .../components/ConnectionSettingsCell.tsx | 10 +- .../src/components/EntityTable/utils.tsx | 10 +- .../src/components/SideMenu/SideMenu.tsx | 2 +- .../src/config/ConfigServiceProvider.tsx | 11 +- airbyte-webapp/src/constants/constants.ts | 3 - .../{defaultServices.tsx => ApiServices.tsx} | 8 +- .../src/core/domain/connection/types.ts | 3 +- .../connector/DestinationDefinitionService.ts | 2 +- .../connector/SourceDefinitionService.ts | 2 +- .../src/core/domain/connector/connector.ts | 4 +- .../src/core/domain/connector/constants.ts | 1 + .../src/core/domain/connector/destination.ts | 3 +- .../src/core/domain/connector/source.ts | 2 +- .../src/core/domain/connector/types.ts | 45 +- .../src/core/domain/workspace/Workspace.ts | 22 + .../src/core/resources/Connection.ts | 3 +- .../src/core/resources/Destination.tsx | 10 +- .../core/resources/DestinationDefinition.ts | 11 +- airbyte-webapp/src/core/resources/Source.tsx | 10 +- .../src/core/resources/SourceDefinition.ts | 14 +- .../src/core/resources/Workspace.ts | 36 +- .../services/Analytics/TrackPageAnalytics.tsx | 3 +- .../services/Analytics/pageNameUtils.tsx | 33 +- .../Analytics/useAnalyticsService.tsx | 12 +- .../services/Onboarding/OnboardingService.tsx | 5 +- .../src/hooks/services/useConnectionHook.tsx | 8 +- .../src/hooks/services/useConnectorAuth.tsx | 2 +- .../services/useDestinationDefinition.tsx | 5 +- 
.../src/hooks/services/useDestinationHook.tsx | 11 +- .../src/hooks/services/useDocumentation.ts | 10 +- .../hooks/services/useSourceDefinition.tsx | 5 +- .../src/hooks/services/useSourceHook.tsx | 8 +- .../src/hooks/services/useWorkspace.tsx | 39 +- airbyte-webapp/src/hooks/useRouter.tsx | 39 +- airbyte-webapp/src/locales/en.json | 5 +- airbyte-webapp/src/packages/cloud/App.tsx | 48 +- .../src/packages/cloud/cloudRoutes.tsx | 198 +++ airbyte-webapp/src/packages/cloud/routes.tsx | 206 --- .../cloud/services/AppServicesProvider.tsx | 28 +- .../cloud/services/auth/AuthService.tsx | 5 +- .../services/workspaces/WorkspacesService.tsx | 195 +-- .../services/workspaces/useInitService.tsx | 14 + .../src/packages/cloud/views/auth/Auth.tsx | 65 +- .../ConfirmPasswordResetPage.tsx | 6 +- .../cloud/views/auth/LoginPage/LoginPage.tsx | 26 +- .../ResetPasswordPage/ResetPasswordPage.tsx | 4 +- .../cloud/views/auth/components/Header.tsx | 24 +- .../views/credits/CreditsPage/CreditsPage.tsx | 33 +- .../components/RemainingCredits.tsx | 6 +- .../views/layout/MainView/ErrorBoundary.tsx | 44 + .../InsufficientPermissionsErrorBoundary.tsx | 38 + .../cloud/views/layout/MainView/MainView.tsx | 21 +- .../cloud/views/layout/SideBar/SideBar.tsx | 47 +- .../{ => settings}/CloudSettingsPage.tsx | 45 +- .../WorkspacePopout/WorkspacePopout.tsx | 38 +- .../WorkspaceSettingsView.tsx | 182 ++- .../components/WorkspaceItem.tsx | 14 +- .../components/WorkspacesList.tsx | 28 +- .../pages/ConnectionPage/ConnectionPage.tsx | 54 +- .../AllConnectionsPage/AllConnectionsPage.tsx | 4 +- .../components/ConnectionsTable.tsx | 25 +- .../ConnectionItemPage/ConnectionItemPage.tsx | 123 +- .../components/SettingsView.tsx | 3 +- .../components/StatusMainInfo.tsx | 3 +- .../components/StatusView.tsx | 3 +- .../CreationFormPage/CreationFormPage.tsx | 42 +- .../components/DestinationForm.tsx | 15 +- .../components/SourceForm.tsx | 15 +- .../pages/DestinationPage/DestinationPage.tsx | 48 +- .../AllDestinationsPage.tsx | 5 +- .../components/DestinationsTable.tsx | 6 +- .../CreateDestinationPage.tsx | 4 +- .../components/DestinationForm.tsx | 2 +- .../DestinationItemPage.tsx | 32 +- .../components/DestinationConnectionTable.tsx | 4 +- .../components/DestinationSettings.tsx | 14 +- .../pages/OnboardingPage/OnboardingPage.tsx | 4 +- .../components/ConnectionStep.tsx | 3 +- .../components/DestinationStep.tsx | 2 +- .../components/ProgressBlock.tsx | 8 +- .../OnboardingPage/components/SourceStep.tsx | 2 +- .../pages/PreferencesPage/PreferencesPage.tsx | 30 +- .../src/pages/SettingsPage/SettingsPage.tsx | 53 +- .../pages/ConnectorsPage/DestinationsPage.tsx | 2 +- .../pages/ConnectorsPage/SourcesPage.tsx | 5 +- .../components/ConnectorsView.tsx | 9 +- .../components/CreateConnector.tsx | 22 +- .../ConnectorsPage/components/VersionCell.tsx | 4 +- .../src/pages/SourcesPage/SourcesPage.tsx | 55 +- .../pages/AllSourcesPage/AllSourcesPage.tsx | 4 +- .../components/SourcesTable.tsx | 7 +- .../CreateSourcePage/CreateSourcePage.tsx | 3 +- .../components/SourceForm.tsx | 2 +- .../pages/SourceItemPage/SourceItemPage.tsx | 27 +- .../components/SourceConnectionTable.tsx | 4 +- .../components/SourceSettings.tsx | 2 +- airbyte-webapp/src/pages/routes.tsx | 203 +-- .../services/workspaces/WorkspacesService.tsx | 79 ++ airbyte-webapp/src/utils/testutils.tsx | 15 +- .../src/views/CompleteOauthRequest.tsx | 4 +- .../TransformationForm/TransformationForm.tsx | 2 +- .../ServiceForm/ServiceForm.test.tsx | 49 +- .../components/Controls/Instruction.tsx | 3 +- 
.../components/Sections/FormSection.tsx | 10 +- .../Connector/ServiceForm/index.stories.tsx | 19 +- .../src/views/common/ErrorOccurredView.tsx | 5 +- .../common/ResorceNotFoundErrorBoundary.tsx | 42 + .../src/views/common/StartOverErrorView.tsx | 34 + .../src/views/layout/MainView/MainView.tsx | 8 +- .../src/views/layout/SideBar/SideBar.tsx | 27 +- airbyte-workers/Dockerfile | 4 +- .../workers/process/KubePodProcess.java | 9 + .../workers/process/KubeProcessFactory.java | 2 +- .../KubePodProcessIntegrationTest.java | 45 + .../airbyte-integration-test-java.gradle | 3 + charts/airbyte/Chart.yaml | 2 +- charts/airbyte/README.md | 8 +- charts/airbyte/templates/_helpers.tpl | 4 +- charts/airbyte/values.yaml | 14 +- docs/SUMMARY.md | 1 + docs/integrations/README.md | 1 + docs/integrations/destinations/bigquery.md | 2 + docs/integrations/destinations/redshift.md | 8 + docs/integrations/destinations/s3.md | 1 + docs/integrations/destinations/snowflake.md | 3 + .../getting-started/destination-redshift.md | 6 +- .../sources/amazon-seller-partner.md | 2 + docs/integrations/sources/bing-ads.md | 1 + docs/integrations/sources/clickhouse.md | 3 +- docs/integrations/sources/drift.md | 3 +- .../sources/facebook-marketing.md | 3 + .../sources/google-analytics-v4.md | 1 + docs/integrations/sources/harvest.md | 1 + docs/integrations/sources/hubspot.md | 25 +- docs/integrations/sources/intercom.md | 1 + docs/integrations/sources/linnworks.md | 3 +- docs/integrations/sources/microsoft-teams.md | 1 + docs/integrations/sources/monday.md | 1 + docs/integrations/sources/openweather.md | 38 + .../sources/paypal-transaction.md | 1 + docs/integrations/sources/shopify.md | 1 + .../sources/snapchat-marketing.md | 1 + docs/integrations/sources/strava.md | 1 + docs/integrations/sources/zendesk-support.md | 1 + docs/operator-guides/upgrading-airbyte.md | 2 +- docs/operator-guides/using-prefect-task.md | 2 +- .../connections/README.md | 16 +- .../json-avro-conversion.md | 367 +++-- .../overlays/stable-with-resource-limits/.env | 2 +- .../kustomization.yaml | 12 +- kube/overlays/stable/.env | 2 +- kube/overlays/stable/kustomization.yaml | 15 +- tools/bin/ci_credentials.sh | 0 tools/bin/ci_performance_test.sh | 20 +- tools/bin/release_version.sh | 4 +- tools/bin/tag_version.sh | 2 +- 438 files changed, 10040 insertions(+), 4624 deletions(-) create mode 100644 airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/d8540a80-6120-485d-b7d6-272bca477d9b.json create mode 100644 airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroWriterTest.java create mode 100644 airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/LegacyS3StreamCopier.java create mode 100644 airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/LegacyS3StreamCopierFactory.java create mode 100644 airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3CopyConfig.java create mode 100644 airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/copy/s3/LegacyS3StreamCopierTest.java create mode 100644 airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3CopyConfigTest.java create mode 100644 
airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopierTest.java create mode 100644 airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierTest.java create mode 100644 airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/StagingDatabaseCsvSheetGenerator.java create mode 100644 airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriterTest.java create mode 100644 airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT.json create mode 100644 airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights_action_breakdowns.json create mode 100644 airbyte-integrations/connectors/source-google-analytics-v4/integration_tests/expected_records.txt create mode 100644 airbyte-integrations/connectors/source-openweather/.dockerignore create mode 100644 airbyte-integrations/connectors/source-openweather/Dockerfile create mode 100644 airbyte-integrations/connectors/source-openweather/README.md create mode 100644 airbyte-integrations/connectors/source-openweather/acceptance-test-config.yml create mode 100755 airbyte-integrations/connectors/source-openweather/acceptance-test-docker.sh create mode 100644 airbyte-integrations/connectors/source-openweather/bootstrap.md create mode 100644 airbyte-integrations/connectors/source-openweather/build.gradle create mode 100644 airbyte-integrations/connectors/source-openweather/integration_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-openweather/integration_tests/abnormal_state.json create mode 100644 airbyte-integrations/connectors/source-openweather/integration_tests/acceptance.py create mode 100644 airbyte-integrations/connectors/source-openweather/integration_tests/catalog.json create mode 100644 airbyte-integrations/connectors/source-openweather/integration_tests/configured_catalog.json create mode 100644 airbyte-integrations/connectors/source-openweather/integration_tests/invalid_config.json create mode 100644 airbyte-integrations/connectors/source-openweather/integration_tests/sample_config.json create mode 100644 airbyte-integrations/connectors/source-openweather/integration_tests/sample_state.json create mode 100644 airbyte-integrations/connectors/source-openweather/main.py create mode 100644 airbyte-integrations/connectors/source-openweather/requirements.txt create mode 100644 airbyte-integrations/connectors/source-openweather/setup.py create mode 100644 airbyte-integrations/connectors/source-openweather/source_openweather/__init__.py create mode 100644 airbyte-integrations/connectors/source-openweather/source_openweather/extra_validations.py create mode 100644 airbyte-integrations/connectors/source-openweather/source_openweather/schemas/one_call.json create mode 100644 airbyte-integrations/connectors/source-openweather/source_openweather/source.py create mode 100644 airbyte-integrations/connectors/source-openweather/source_openweather/spec.json create mode 100644 airbyte-integrations/connectors/source-openweather/source_openweather/streams.py create mode 100644 airbyte-integrations/connectors/source-openweather/unit_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-openweather/unit_tests/test_extra_validations.py create mode 
100644 airbyte-integrations/connectors/source-openweather/unit_tests/test_source.py create mode 100644 airbyte-integrations/connectors/source-openweather/unit_tests/test_streams.py rename airbyte-integrations/connectors/source-shopify/source_shopify/schemas/{orders_refunds.json => order_refunds.json} (86%) rename airbyte-integrations/connectors/source-shopify/source_shopify/schemas/{orders_risks.json => order_risks.json} (100%) create mode 100644 airbyte-oauth/src/main/java/io/airbyte/oauth/flows/StravaOAuthFlow.java create mode 100644 airbyte-oauth/src/test/java/io/airbyte/oauth/flows/StravaOAuthFlowTest.java delete mode 100644 airbyte-webapp/src/constants/constants.ts rename airbyte-webapp/src/core/{defaultServices.tsx => ApiServices.tsx} (89%) create mode 100644 airbyte-webapp/src/core/domain/connector/constants.ts create mode 100644 airbyte-webapp/src/core/domain/workspace/Workspace.ts create mode 100644 airbyte-webapp/src/packages/cloud/cloudRoutes.tsx delete mode 100644 airbyte-webapp/src/packages/cloud/routes.tsx create mode 100644 airbyte-webapp/src/packages/cloud/services/workspaces/useInitService.tsx create mode 100644 airbyte-webapp/src/packages/cloud/views/layout/MainView/ErrorBoundary.tsx create mode 100644 airbyte-webapp/src/packages/cloud/views/layout/MainView/InsufficientPermissionsErrorBoundary.tsx rename airbyte-webapp/src/packages/cloud/views/{ => settings}/CloudSettingsPage.tsx (58%) create mode 100644 airbyte-webapp/src/services/workspaces/WorkspacesService.tsx create mode 100644 airbyte-webapp/src/views/common/ResorceNotFoundErrorBoundary.tsx create mode 100644 airbyte-webapp/src/views/common/StartOverErrorView.tsx create mode 100644 docs/integrations/sources/openweather.md create mode 100755 tools/bin/ci_credentials.sh diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 6ae95b538122..7164abe316cf 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.33.12-alpha +current_version = 0.34.1-alpha commit = False tag = False parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-[a-z]+)?
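For context on the hunk above: bumpversion splits the current version with the parse pattern, and the optional trailing group is what carries a pre-release suffix like "-alpha" through the bump. A minimal Python sketch using only the regex from this hunk (the surrounding script is illustrative, not part of the patch):

import re

# parse pattern from .bumpversion.cfg: three numeric groups plus an
# optional lowercase suffix such as "-alpha"
PARSE = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-[a-z]+)?")

for version in ("0.33.12-alpha", "0.34.1-alpha"):
    m = PARSE.match(version)
    print(version, "->", m.group("major"), m.group("minor"), m.group("patch"))
# 0.33.12-alpha -> 0 33 12
# 0.34.1-alpha -> 0 34 1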
diff --git a/.env b/.env index dde8138871be..97b809640f44 100644 --- a/.env +++ b/.env @@ -1,4 +1,4 @@ -VERSION=0.33.12-alpha +VERSION=0.34.1-alpha # Airbyte Internal Job Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_USER=docker diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml index 7d22d1bb62a1..cce2deb26b5d 100644 --- a/.github/workflows/gradle.yml +++ b/.github/workflows/gradle.yml @@ -345,7 +345,13 @@ jobs: node-version: "16.13.0" - name: Install Cypress Test Dependencies - run: sudo apt-get update && sudo apt-get install -y libgtk2.0-0 libgtk-3-0 libgbm-dev libnotify-dev libgconf-2-4 libnss3 libxss1 libasound2 libxtst6 xauth xvfb + run: | + # wait to receive lock (see https://askubuntu.com/questions/132059/how-to-make-a-package-manager-wait-if-another-instance-of-apt-is-running) + while sudo fuser /var/{lib/{dpkg,apt/lists},cache/apt/archives}/lock >/dev/null 2>&1; do + sleep 1 + done + + sudo apt-get update && sudo apt-get install -y libgtk2.0-0 libgtk-3-0 libgbm-dev libnotify-dev libgconf-2-4 libnss3 libxss1 libasound2 libxtst6 xauth xvfb - name: Set up CI Gradle Properties run: | @@ -448,6 +454,11 @@ jobs: - name: Install socat (required for port forwarding) run: | + # wait to receive lock (see https://askubuntu.com/questions/132059/how-to-make-a-package-manager-wait-if-another-instance-of-apt-is-running) + while sudo fuser /var/{lib/{dpkg,apt/lists},cache/apt/archives}/lock >/dev/null 2>&1; do + sleep 1 + done + sudo apt-get update sudo apt-get install socat @@ -492,7 +503,6 @@ jobs: AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }} SECRET_STORE_GCP_CREDENTIALS: ${{ secrets.SECRET_STORE_GCP_CREDENTIALS }} SECRET_STORE_GCP_PROJECT_ID: ${{ secrets.SECRET_STORE_GCP_PROJECT_ID }} - SECRET_STORE_FOR_CONFIGS: ${{ secrets.SECRET_STORE_FOR_CONFIGS }} run: | CI=true IS_MINIKUBE=true ./tools/bin/acceptance_test_kube.sh @@ -514,7 +524,6 @@ jobs: AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }} SECRET_STORE_GCP_CREDENTIALS: ${{ secrets.SECRET_STORE_GCP_CREDENTIALS }} SECRET_STORE_GCP_PROJECT_ID: ${{ secrets.SECRET_STORE_GCP_PROJECT_ID }} - SECRET_STORE_FOR_CONFIGS: ${{ secrets.SECRET_STORE_FOR_CONFIGS }} run: | CI=true ./tools/bin/gcp_acceptance_tests.sh diff --git a/.github/workflows/release-airbyte-os.yml b/.github/workflows/release-airbyte-os.yml index 10770ceb6e02..8f45f8411d38 100644 --- a/.github/workflows/release-airbyte-os.yml +++ b/.github/workflows/release-airbyte-os.yml @@ -17,15 +17,15 @@ jobs: fetch-depth: 0 - uses: actions/setup-java@v1 with: - java-version: '17' + java-version: "17" - uses: actions/setup-node@v1 with: - node-version: '16.13.0' + node-version: "16.13.0" - name: Save Old Version id: old_version run: | - echo ::set-output name=OLD_VERSION::$(grep VERSION .env | cut -d"=" -f2) + echo ::set-output name=OLD_VERSION::$(grep -w VERSION .env | cut -d"=" -f2) - name: Release Airbyte id: release_airbyte env: @@ -38,7 +38,7 @@ jobs: - name: Save New Version id: new_version run: | - echo ::set-output name=NEW_VERSION::$(grep VERSION .env | cut -d"=" -f2) + echo ::set-output name=NEW_VERSION::$(grep -w VERSION .env | cut -d"=" -f2) - name: Get PR Body id: pr_body env: diff --git a/.github/workflows/test-performance-command.yml b/.github/workflows/test-performance-command.yml index 3db5a501d246..1cf83a0b24a6 100644 --- a/.github/workflows/test-performance-command.yml +++ b/.github/workflows/test-performance-command.yml @@ -67,6 +67,10 @@ jobs: uses: 
actions/checkout@v2 with: repository: ${{ github.event.inputs.repo }} + - name: Install Java + uses: actions/setup-java@v1 + with: + java-version: '17' - name: Install Pyenv and Tox # Besides PyEnv, this does not set up any runtimes because it uses an AMI image that has everything pre-installed. See https://github.com/airbytehq/airbyte/issues/4559/ run: | diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index c131e923aa72..9d6f602929d7 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -5,6 +5,6 @@ ENV APPLICATION airbyte-bootloader WORKDIR /app -ADD bin/${APPLICATION}-0.33.12-alpha.tar /app +ADD bin/${APPLICATION}-0.34.1-alpha.tar /app -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.33.12-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.34.1-alpha/bin/${APPLICATION}"] diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 859393b98d8a..40e5b2468edf 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.1.44 +Log http response status code and its content. + ## 0.1.43 Fix logging of unhandled exceptions: print stacktrace. diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/rate_limiting.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/rate_limiting.py index bb219a40a448..ab7cbba741cf 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/rate_limiting.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/rate_limiting.py @@ -21,6 +21,8 @@ def default_backoff_handler(max_tries: int, factor: int, **kwargs): def log_retry_attempt(details): _, exc, _ = sys.exc_info() + if exc.response: + logger.info(f"Status code: {exc.response.status_code}, Response Content: {exc.response.content}") logger.info( f"Caught retryable error '{str(exc)}' after {details['tries']} tries. Waiting {details['wait']} seconds then retrying..." ) @@ -48,6 +50,8 @@ def user_defined_backoff_handler(max_tries: int, **kwargs): def sleep_on_ratelimit(details): _, exc, _ = sys.exc_info() if isinstance(exc, UserDefinedBackoffException): + if exc.response: + logger.info(f"Status code: {exc.response.status_code}, Response Content: {exc.response.content}") retry_after = exc.backoff logger.info(f"Retrying.
Sleeping for {retry_after} seconds") time.sleep(retry_after + 1) # extra second to cover any fractions of second diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index 48a75c906920..1ab0a74b6289 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -15,7 +15,7 @@ setup( name="airbyte-cdk", - version="0.1.43", + version="0.1.44", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/424892c4-daac-4491-b35d-c6688ba547ba.json b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/424892c4-daac-4491-b35d-c6688ba547ba.json index e748337dada5..20d735eb73a1 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/424892c4-daac-4491-b35d-c6688ba547ba.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/424892c4-daac-4491-b35d-c6688ba547ba.json @@ -2,7 +2,7 @@ "destinationDefinitionId": "424892c4-daac-4491-b35d-c6688ba547ba", "name": "Snowflake", "dockerRepository": "airbyte/destination-snowflake", - "dockerImageTag": "0.3.19", + "dockerImageTag": "0.3.21", "documentationUrl": "https://docs.airbyte.io/integrations/destinations/snowflake", "icon": "snowflake.svg" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/200330b2-ea62-4d11-ac6d-cfe3e3f8ab2b.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/200330b2-ea62-4d11-ac6d-cfe3e3f8ab2b.json index 2e03684a9226..579b869ed2fd 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/200330b2-ea62-4d11-ac6d-cfe3e3f8ab2b.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/200330b2-ea62-4d11-ac6d-cfe3e3f8ab2b.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "200330b2-ea62-4d11-ac6d-cfe3e3f8ab2b", "name": "Snapchat Marketing", "dockerRepository": "airbyte/source-snapchat-marketing", - "dockerImageTag": "0.1.2", + "dockerImageTag": "0.1.4", "documentationUrl": "https://docs.airbyte.io/integrations/sources/snapchat-marketing", "icon": "snapchat.svg" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/36c891d9-4bd9-43ac-bad2-10e12756272c.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/36c891d9-4bd9-43ac-bad2-10e12756272c.json index de7146c0e368..913968015e74 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/36c891d9-4bd9-43ac-bad2-10e12756272c.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/36c891d9-4bd9-43ac-bad2-10e12756272c.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "36c891d9-4bd9-43ac-bad2-10e12756272c", "name": "HubSpot", "dockerRepository": "airbyte/source-hubspot", - "dockerImageTag": "0.1.26", + "dockerImageTag": "0.1.29", "documentationUrl": "https://docs.airbyte.io/integrations/sources/hubspot", "icon": "hubspot.svg" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/445831eb-78db-4b1f-8f1f-0d96ad8739e2.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/445831eb-78db-4b1f-8f1f-0d96ad8739e2.json index eff49bd97360..247e0ea71820 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/445831eb-78db-4b1f-8f1f-0d96ad8739e2.json +++ 
b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/445831eb-78db-4b1f-8f1f-0d96ad8739e2.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "445831eb-78db-4b1f-8f1f-0d96ad8739e2", "name": "Drift", "dockerRepository": "airbyte/source-drift", - "dockerImageTag": "0.2.4", + "dockerImageTag": "0.2.5", "documentationUrl": "https://docs.airbyte.io/integrations/sources/drift", "icon": "drift.svg" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/47f25999-dd5e-4636-8c39-e7cea2453331.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/47f25999-dd5e-4636-8c39-e7cea2453331.json index f13ddcc133d3..6b06a621e092 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/47f25999-dd5e-4636-8c39-e7cea2453331.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/47f25999-dd5e-4636-8c39-e7cea2453331.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "47f25999-dd5e-4636-8c39-e7cea2453331", "name": "Bing Ads", "dockerRepository": "airbyte/source-bing-ads", - "dockerImageTag": "0.1.1", + "dockerImageTag": "0.1.2", "documentationUrl": "https://docs.airbyte.io/integrations/sources/bing-ads", "icon": "bingads.svg" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json index d87c3137c48a..5c5cbe68d36e 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "79c1aa37-dae3-42ae-b333-d1c105477715", "name": "Zendesk Support", "dockerRepository": "airbyte/source-zendesk-support", - "dockerImageTag": "0.1.10", + "dockerImageTag": "0.1.9", "documentationUrl": "https://docs.airbyte.io/integrations/sources/zendesk-support", "icon": "zendesk.svg" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/7a4327c4-315a-11ec-8d3d-0242ac130003.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/7a4327c4-315a-11ec-8d3d-0242ac130003.json index 9ea3a861e9de..263995db6088 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/7a4327c4-315a-11ec-8d3d-0242ac130003.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/7a4327c4-315a-11ec-8d3d-0242ac130003.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "7a4327c4-315a-11ec-8d3d-0242ac130003", "name": "Strava", "dockerRepository": "airbyte/source-strava", - "dockerImageTag": "0.1.1", + "dockerImageTag": "0.1.2", "documentationUrl": "https://docs.airbyte.io/integrations/sources/strava", "icon": "strava.svg" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/80a54ea2-9959-4040-aac1-eee42423ec9b.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/80a54ea2-9959-4040-aac1-eee42423ec9b.json index 7f129ff89940..727f7a5e008a 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/80a54ea2-9959-4040-aac1-eee42423ec9b.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/80a54ea2-9959-4040-aac1-eee42423ec9b.json @@ -1,8 +1,8 @@ { "sourceDefinitionId": "80a54ea2-9959-4040-aac1-eee42423ec9b", "name": "Monday", - 
"dockerRepository": "airbyte/source-zendesk-monday", - "dockerImageTag": "0.1.0", + "dockerRepository": "airbyte/source-monday", + "dockerImageTag": "0.1.2", "documentationUrl": "https://docs.airbyte.io/integrations/sources/monday", "icon": "monday.svg" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/9da77001-af33-4bcd-be46-6252bf9342b9.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/9da77001-af33-4bcd-be46-6252bf9342b9.json index 724874f83d2d..a3a19d62e123 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/9da77001-af33-4bcd-be46-6252bf9342b9.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/9da77001-af33-4bcd-be46-6252bf9342b9.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "9da77001-af33-4bcd-be46-6252bf9342b9", "name": "Shopify", "dockerRepository": "airbyte/source-shopify", - "dockerImageTag": "0.1.25", + "dockerImageTag": "0.1.26", "documentationUrl": "https://docs.airbyte.io/integrations/sources/shopify", "icon": "shopify.svg" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/bad83517-5e54-4a3d-9b53-63e85fbd4d7c.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/bad83517-5e54-4a3d-9b53-63e85fbd4d7c.json index b6ebd32c8af7..1dd6491075fd 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/bad83517-5e54-4a3d-9b53-63e85fbd4d7c.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/bad83517-5e54-4a3d-9b53-63e85fbd4d7c.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "bad83517-5e54-4a3d-9b53-63e85fbd4d7c", "name": "ClickHouse", "dockerRepository": "airbyte/source-clickhouse", - "dockerImageTag": "0.1.5", + "dockerImageTag": "0.1.6", "documentationUrl": "https://docs.airbyte.io/integrations/sources/clickhouse", "icon": "cliskhouse.svg" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/d8540a80-6120-485d-b7d6-272bca477d9b.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/d8540a80-6120-485d-b7d6-272bca477d9b.json new file mode 100644 index 000000000000..31c8deb32c49 --- /dev/null +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/d8540a80-6120-485d-b7d6-272bca477d9b.json @@ -0,0 +1,8 @@ +{ + "sourceDefinitionId": "d8540a80-6120-485d-b7d6-272bca477d9b", + "name": "OpenWeather", + "dockerRepository": "airbyte/source-openweather", + "dockerImageTag": "0.1.0", + "documentationUrl": "https://docs.airbyte.io/integrations/sources/openweather", + "icon": "openweather.svg" +} diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/d913b0f2-cc51-4e55-a44c-8ba1697b9239.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/d913b0f2-cc51-4e55-a44c-8ba1697b9239.json index 558223e7b925..84b99570642b 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/d913b0f2-cc51-4e55-a44c-8ba1697b9239.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/d913b0f2-cc51-4e55-a44c-8ba1697b9239.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "d913b0f2-cc51-4e55-a44c-8ba1697b9239", "name": "Paypal Transaction", "dockerRepository": "airbyte/source-paypal-transaction", - "dockerImageTag": "0.1.2", + "dockerImageTag": "0.1.3", "documentationUrl": "https://docs.airbyte.io/integrations/sources/paypal-transaction", "icon": "paypal.svg" } diff --git 
a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e55879a8-0ef8-4557-abcf-ab34c53ec460.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e55879a8-0ef8-4557-abcf-ab34c53ec460.json index cd8b17fa588f..7b07f016b488 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e55879a8-0ef8-4557-abcf-ab34c53ec460.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e55879a8-0ef8-4557-abcf-ab34c53ec460.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "e55879a8-0ef8-4557-abcf-ab34c53ec460", "name": "Amazon Seller Partner", "dockerRepository": "airbyte/source-amazon-seller-partner", - "dockerImageTag": "0.2.5", + "dockerImageTag": "0.2.6", "documentationUrl": "https://docs.airbyte.io/integrations/sources/amazon-seller-partner", "icon": "amazonsellerpartner.svg" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json index bc4aedf95d14..591f2e7982bc 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "e7778cfc-e97c-4458-9ecb-b4f2bba8946c", "name": "Facebook Marketing", "dockerRepository": "airbyte/source-facebook-marketing", - "dockerImageTag": "0.2.26", + "dockerImageTag": "0.2.29", "documentationUrl": "https://docs.airbyte.io/integrations/sources/facebook-marketing", "icon": "facebook.svg" } diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/fe2b4084-3386-4d3b-9ad6-308f61a6f1e6.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/fe2b4084-3386-4d3b-9ad6-308f61a6f1e6.json index 9f6be434651a..df959e0c5588 100644 --- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/fe2b4084-3386-4d3b-9ad6-308f61a6f1e6.json +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/fe2b4084-3386-4d3b-9ad6-308f61a6f1e6.json @@ -2,7 +2,7 @@ "sourceDefinitionId": "fe2b4084-3386-4d3b-9ad6-308f61a6f1e6", "name": "Harvest", "dockerRepository": "airbyte/source-harvest", - "dockerImageTag": "0.1.6", + "dockerImageTag": "0.1.8", "documentationUrl": "https://docs.airbyte.io/integrations/sources/harvest", "icon": "harvest.svg" } diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index 32c6a5218e5f..4705e5eb53e8 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -13,13 +13,13 @@ - name: BigQuery destinationDefinitionId: 22f6c74f-5699-40ff-833c-4a879ea40133 dockerRepository: airbyte/destination-bigquery - dockerImageTag: 0.5.0 + dockerImageTag: 0.5.1 documentationUrl: https://docs.airbyte.io/integrations/destinations/bigquery icon: bigquery.svg - name: BigQuery (denormalized typed struct) destinationDefinitionId: 079d5540-f236-4294-ba7c-ade8fd918496 dockerRepository: airbyte/destination-bigquery-denormalized - dockerImageTag: 0.1.10 + dockerImageTag: 0.1.11 documentationUrl: https://docs.airbyte.io/integrations/destinations/bigquery icon: bigquery.svg - name: Cassandra @@ -150,7 
+150,7 @@ - name: Redshift destinationDefinitionId: f7a7d195-377f-cf5b-70a5-be6b819019dc dockerRepository: airbyte/destination-redshift - dockerImageTag: 0.3.21 + dockerImageTag: 0.3.23 documentationUrl: https://docs.airbyte.io/integrations/destinations/redshift icon: redshift.svg - name: Rockset @@ -161,7 +161,7 @@ - name: S3 destinationDefinitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362 dockerRepository: airbyte/destination-s3 - dockerImageTag: 0.1.16 + dockerImageTag: 0.2.0 documentationUrl: https://docs.airbyte.io/integrations/destinations/s3 icon: s3.svg - name: SFTP-JSON @@ -173,7 +173,7 @@ - name: Snowflake destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba dockerRepository: airbyte/destination-snowflake - dockerImageTag: 0.3.20 + dockerImageTag: 0.3.21 documentationUrl: https://docs.airbyte.io/integrations/destinations/snowflake icon: snowflake.svg - name: MariaDB ColumnStore diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index 21fdff9a65a3..bc92ecd78d71 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -176,7 +176,7 @@ supportsDBT: false supported_destination_sync_modes: - "append" -- dockerImage: "airbyte/destination-bigquery:0.5.0" +- dockerImage: "airbyte/destination-bigquery:0.5.1" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" connectionSpecification: @@ -226,27 +226,29 @@ - "asia-northeast2" - "asia-northeast3" - "asia-south1" + - "asia-south2" - "asia-southeast1" - "asia-southeast2" - "australia-southeast1" - - "europe-central1" + - "australia-southeast2" - "europe-central2" - "europe-north1" - "europe-west1" - "europe-west2" - "europe-west3" - "europe-west4" - - "europe-west5" - "europe-west6" - "northamerica-northeast1" + - "northamerica-northeast2" - "southamerica-east1" + - "southamerica-west1" - "us-central1" - "us-east1" - "us-east4" - - "us-west-1" - - "us-west-2" - - "us-west-3" - - "us-west-4" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" credentials_json: type: "string" description: "The contents of the JSON service account key. Check out the\ @@ -352,7 +354,7 @@ - "overwrite" - "append" - "append_dedup" -- dockerImage: "airbyte/destination-bigquery-denormalized:0.1.10" +- dockerImage: "airbyte/destination-bigquery-denormalized:0.1.11" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" connectionSpecification: @@ -389,27 +391,29 @@ - "asia-northeast2" - "asia-northeast3" - "asia-south1" + - "asia-south2" - "asia-southeast1" - "asia-southeast2" - "australia-southeast1" - - "europe-central1" + - "australia-southeast2" - "europe-central2" - "europe-north1" - "europe-west1" - "europe-west2" - "europe-west3" - "europe-west4" - - "europe-west5" - "europe-west6" - "northamerica-northeast1" + - "northamerica-northeast2" - "southamerica-east1" + - "southamerica-west1" - "us-central1" - "us-east1" - "us-east4" - - "us-west-1" - - "us-west-2" - - "us-west-3" - - "us-west-4" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" credentials_json: type: "string" description: "The contents of the JSON service account key. 
Check out the\ @@ -2914,7 +2918,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-redshift:0.3.21" +- dockerImage: "airbyte/destination-redshift:0.3.23" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/redshift" connectionSpecification: @@ -2975,6 +2979,13 @@ >AWS docs for more details." examples: - "airbyte.staging" + s3_bucket_path: + title: "S3 Bucket Path" + type: "string" + description: "The directory under the S3 bucket where data will be written.\ + \ If not provided, then defaults to the root directory." + examples: + - "data_sync/test" s3_bucket_region: title: "S3 Bucket Region" type: "string" @@ -3032,6 +3043,13 @@ \ in larger memory requirements. A rule of thumb is to multiply the part\ \ size by 10 to get the memory requirement. Modify this with care." title: "Stream Part Size" + purge_staging_data: + title: "Purge Staging Files and Tables" + type: "boolean" + description: "Whether to delete the staging files from S3 after completing\ + \ the sync. See the docs for details. Only relevant for COPY. Defaults\ + \ to true." + default: true supportsIncremental: true supportsNormalization: true supportsDBT: true @@ -3082,7 +3100,7 @@ supported_destination_sync_modes: - "append" - "overwrite" -- dockerImage: "airbyte/destination-s3:0.1.16" +- dockerImage: "airbyte/destination-s3:0.2.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/s3" connectionSpecification: @@ -3453,7 +3471,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-snowflake:0.3.20" +- dockerImage: "airbyte/destination-snowflake:0.3.21" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/snowflake" connectionSpecification: diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index f06bf54d75b4..f71c9a595101 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -22,10 +22,16 @@ - name: Amazon Seller Partner sourceDefinitionId: e55879a8-0ef8-4557-abcf-ab34c53ec460 dockerRepository: airbyte/source-amazon-seller-partner - dockerImageTag: 0.2.5 + dockerImageTag: 0.2.6 sourceType: api documentationUrl: https://docs.airbyte.io/integrations/sources/amazon-seller-partner icon: amazonsellerpartner.svg +- name: Amazon SQS + sourceDefinitionId: 983fd355-6bf3-4709-91b5-37afa391eeb6 + dockerRepository: airbyte/source-amazon-sqs + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/amazon-sqs + sourceType: api - name: Amplitude sourceDefinitionId: fa9f58c6-2d03-4237-aaa4-07d75e0c1396 dockerRepository: airbyte/source-amplitude @@ -85,7 +91,7 @@ - name: Bing Ads sourceDefinitionId: 47f25999-dd5e-4636-8c39-e7cea2453331 dockerRepository: airbyte/source-bing-ads - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/sources/bing-ads icon: bingads.svg sourceType: api @@ -113,7 +119,7 @@ - name: ClickHouse sourceDefinitionId: bad83517-5e54-4a3d-9b53-63e85fbd4d7c dockerRepository: airbyte/source-clickhouse - dockerImageTag: 0.1.5 + dockerImageTag: 0.1.6 documentationUrl: https://docs.airbyte.io/integrations/sources/clickhouse icon: cliskhouse.svg sourceType: database @@ -155,7 +161,7 @@ - name: Drift sourceDefinitionId: 445831eb-78db-4b1f-8f1f-0d96ad8739e2 dockerRepository: 
airbyte/source-drift - dockerImageTag: 0.2.4 + dockerImageTag: 0.2.5 documentationUrl: https://docs.airbyte.io/integrations/sources/drift icon: drift.svg sourceType: api @@ -169,7 +175,7 @@ - name: Facebook Marketing sourceDefinitionId: e7778cfc-e97c-4458-9ecb-b4f2bba8946c dockerRepository: airbyte/source-facebook-marketing - dockerImageTag: 0.2.26 + dockerImageTag: 0.2.29 documentationUrl: https://docs.airbyte.io/integrations/sources/facebook-marketing icon: facebook.svg sourceType: api @@ -232,7 +238,7 @@ - name: Google Analytics sourceDefinitionId: eff3616a-f9c3-11eb-9a03-0242ac130003 dockerRepository: airbyte/source-google-analytics-v4 - dockerImageTag: 0.1.13 + dockerImageTag: 0.1.14 documentationUrl: https://docs.airbyte.io/integrations/sources/google-analytics-v4 icon: google-analytics.svg sourceType: api @@ -274,14 +280,14 @@ - name: Harvest sourceDefinitionId: fe2b4084-3386-4d3b-9ad6-308f61a6f1e6 dockerRepository: airbyte/source-harvest - dockerImageTag: 0.1.7 + dockerImageTag: 0.1.8 documentationUrl: https://docs.airbyte.io/integrations/sources/harvest icon: harvest.svg sourceType: api - name: HubSpot sourceDefinitionId: 36c891d9-4bd9-43ac-bad2-10e12756272c dockerRepository: airbyte/source-hubspot - dockerImageTag: 0.1.26 + dockerImageTag: 0.1.29 documentationUrl: https://docs.airbyte.io/integrations/sources/hubspot icon: hubspot.svg sourceType: api @@ -302,7 +308,7 @@ - name: Intercom sourceDefinitionId: d8313939-3782-41b0-be29-b3ca20d8dd3a dockerRepository: airbyte/source-intercom - dockerImageTag: 0.1.11 + dockerImageTag: 0.1.12 documentationUrl: https://docs.airbyte.io/integrations/sources/intercom icon: intercom.svg sourceType: api @@ -334,6 +340,12 @@ documentationUrl: https://docs.airbyte.io/integrations/sources/klaviyo icon: klaviyo.svg sourceType: api +- name: Lemlist + sourceDefinitionId: 789f8e7a-2d28-11ec-8d3d-0242ac130003 + dockerRepository: airbyte/source-lemlist + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/source-lemlist + sourceType: api - name: Lever Hiring sourceDefinitionId: 3981c999-bd7d-4afc-849b-e53dea90c948 dockerRepository: airbyte/source-lever-hiring @@ -351,7 +363,7 @@ - name: Linnworks sourceDefinitionId: 7b86879e-26c5-4ef6-a5ce-2be5c7b46d1e dockerRepository: airbyte/source-linnworks - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.4 documentationUrl: https://docs.airbyte.io/integrations/sources/linnworks icon: linnworks.svg sourceType: api @@ -393,7 +405,7 @@ - name: Microsoft teams sourceDefinitionId: eaf50f04-21dd-4620-913b-2a83f5635227 dockerRepository: airbyte/source-microsoft-teams - dockerImageTag: 0.2.4 + dockerImageTag: 0.2.5 documentationUrl: https://docs.airbyte.io/integrations/sources/microsoft-teams icon: microsoft-teams.svg sourceType: api @@ -407,7 +419,7 @@ - name: Monday sourceDefinitionId: 80a54ea2-9959-4040-aac1-eee42423ec9b dockerRepository: airbyte/source-monday - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/sources/monday icon: monday.svg sourceType: api @@ -446,6 +458,12 @@ documentationUrl: https://docs.airbyte.io/integrations/sources/lever-onesignal icon: onesignal.svg sourceType: api +- name: OpenWeather + sourceDefinitionId: d8540a80-6120-485d-b7d6-272bca477d9b + dockerRepository: airbyte/source-openweather + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/openweather + sourceType: api - name: Oracle DB sourceDefinitionId: b39a7370-74c3-45a6-ac3a-380d48520a83 dockerRepository: 
airbyte/source-oracle @@ -463,7 +481,7 @@ - name: Paypal Transaction sourceDefinitionId: d913b0f2-cc51-4e55-a44c-8ba1697b9239 dockerRepository: airbyte/source-paypal-transaction - dockerImageTag: 0.1.2 + dockerImageTag: 0.1.3 documentationUrl: https://docs.airbyte.io/integrations/sources/paypal-transaction icon: paypal.svg sourceType: api @@ -589,7 +607,7 @@ - name: Shopify sourceDefinitionId: 9da77001-af33-4bcd-be46-6252bf9342b9 dockerRepository: airbyte/source-shopify - dockerImageTag: 0.1.25 + dockerImageTag: 0.1.26 documentationUrl: https://docs.airbyte.io/integrations/sources/shopify icon: shopify.svg sourceType: api @@ -617,7 +635,7 @@ - name: Snapchat Marketing sourceDefinitionId: 200330b2-ea62-4d11-ac6d-cfe3e3f8ab2b dockerRepository: airbyte/source-snapchat-marketing - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.4 documentationUrl: https://docs.airbyte.io/integrations/sources/snapchat-marketing icon: snapchat.svg sourceType: api @@ -638,7 +656,7 @@ - sourceDefinitionId: 7a4327c4-315a-11ec-8d3d-0242ac130003 name: Strava dockerRepository: airbyte/source-strava - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 documentationUrl: https://docs.airbyte.io/integrations/sources/strava icon: strava.svg - name: Stripe @@ -721,7 +739,7 @@ - name: Zendesk Support sourceDefinitionId: 79c1aa37-dae3-42ae-b333-d1c105477715 dockerRepository: airbyte/source-zendesk-support - dockerImageTag: 0.1.10 + dockerImageTag: 0.1.9 documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-support icon: zendesk.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 3ef87ad51e45..b4c2dd4725a3 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -153,7 +153,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-amazon-seller-partner:0.2.5" +- dockerImage: "airbyte/source-amazon-seller-partner:0.2.6" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/amazon-seller-partner" changelogUrl: "https://docs.airbyte.io/integrations/sources/amazon-seller-partner" @@ -179,6 +179,15 @@ - "30" - "365" type: "integer" + report_options: + title: "Report Options" + description: "Additional information passed to reports. This varies by report\ + \ type. Must be a valid json string." + examples: + - "{\"GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT\": {\"reportPeriod\": \"WEEK\"\ + }}" + - "{\"GET_SOME_REPORT\": {\"custom\": \"true\"}}" + type: "string" refresh_token: title: "Refresh Token" description: "The Refresh Token obtained via OAuth flow authorization." 
@@ -293,6 +302,115 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-amazon-sqs:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/amazon-sqs" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Amazon SQS Source Spec" + type: "object" + required: + - "queue_url" + - "region" + - "delete_messages" + additionalProperties: false + properties: + queue_url: + title: "Queue URL" + description: "URL of the SQS Queue" + type: "string" + examples: + - "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" + order: 0 + region: + title: "AWS Region" + description: "AWS Region of the SQS Queue" + type: "string" + enum: + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" + - "us-gov-east-1" + - "us-gov-west-1" + order: 1 + delete_messages: + title: "Delete Messages After Read" + description: "If Enabled, messages will be deleted from the SQS Queue after\ + \ being read. If Disabled, messages are left in the queue and can be read\ + \ more than once. WARNING: Enabling this option can result in data loss\ + \ in cases of failure, use with caution, see documentation for more detail. " + type: "boolean" + default: false + order: 2 + max_batch_size: + title: "Max Batch Size" + description: "Max amount of messages to get in one batch (10 max)" + type: "integer" + examples: + - "5" + order: 3 + max_wait_time: + title: "Max Wait Time" + description: "Max amount of time in seconds to wait for messages in a single\ + \ poll (20 max)" + type: "integer" + examples: + - "5" + order: 4 + attributes_to_return: + title: "Message Attributes To Return" + description: "Comma separated list of Message Attribute names to return" + type: "string" + examples: + - "attr1,attr2" + order: 5 + visibility_timeout: + title: "Message Visibility Timeout" + description: "Modify the Visibility Timeout of the individual message from\ + \ the Queue's default (seconds)." + type: "integer" + examples: + - "15" + order: 6 + access_key: + title: "AWS IAM Access Key ID" + description: "The Access Key ID of the AWS IAM Role to use for pulling messages" + type: "string" + examples: + - "xxxxxHRNxxx3TBxxxxxx" + airbyte_secret: true + order: 7 + secret_key: + title: "AWS IAM Secret Key" + description: "The Secret Key of the AWS IAM Role to use for pulling messages" + type: "string" + examples: + - "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz" + airbyte_secret: true + order: 8 + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-amplitude:0.1.3" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/amplitude" @@ -588,7 +706,7 @@ - "overwrite" - "append" - "append_dedup" -- dockerImage: "airbyte/source-bing-ads:0.1.1" +- dockerImage: "airbyte/source-bing-ads:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/bing-ads" connectionSpecification: @@ -613,7 +731,7 @@ accounts: title: "Accounts" type: "object" - description: "Account selection strategy." + description: "Account selection."
oneOf: - title: "All accounts assigned to your user" additionalProperties: false @@ -628,7 +746,7 @@ const: "all" - title: "Subset of your accounts" additionalProperties: false - description: "Fetch data for subset of account ids." + description: "Fetch data for subset of account IDs." required: - "ids" - "selection_strategy" @@ -640,6 +758,7 @@ const: "subset" ids: type: "array" + title: "IDs" description: "List of accounts from which data will be fetched." items: type: "string" @@ -647,33 +766,43 @@ uniqueItems: true client_id: type: "string" - description: "ID of your Microsoft Advertising client application." + title: "Client ID" + description: "The Client ID of your Microsoft Advertising developer application." airbyte_secret: true client_secret: type: "string" - description: "Secret of your Microsoft Advertising client application." + title: "Client Secret" + description: "The Client Secret of your Microsoft Advertising developer\ + \ application." airbyte_secret: true customer_id: type: "string" - description: "User's customer ID." + title: "Customer ID" + description: "User's Customer ID. Go to your Accounts and Billing page.\ + \ Your Customer ID will be listed on the Accounts tab under the heading\ + \ Customer." developer_token: type: "string" + title: "Developer Token" description: "Developer token associated with user." airbyte_secret: true refresh_token: type: "string" - description: "The long-lived Refresh token received via grant_type=refresh_token\ - \ request." + title: "Refresh Token" + description: "Refresh Token to renew the expired Access Token." airbyte_secret: true user_id: type: "string" - description: "Unique user identifier." + title: "Account ID" + description: "Bing Ads Account ID. You can find Account ID by going to your\ + \ profile and selecting Accounts and Billing." reports_start_date: type: "string" + title: "Reports Start Date" format: "date" default: "2020-01-01" - description: "From which date perform initial sync for report related streams.\ - \ In YYYY-MM-DD format" + description: "UTC date in YYYY-MM-DD format. Any reports before this date\ + \ will not be replicated." hourly_reports: title: "Hourly reports" type: "boolean" @@ -840,7 +969,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-clickhouse:0.1.5" +- dockerImage: "airbyte/source-clickhouse:0.1.6" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/clickhouse" connectionSpecification: @@ -855,10 +984,12 @@ additionalProperties: false properties: host: - description: "Host Endpoint of the Clickhouse Cluster" + description: "The host endpoint of the Clickhouse cluster." + title: "Host" type: "string" port: - description: "Port of the database." + description: "The port of the database." + title: "Port" type: "integer" minimum: 0 maximum: 65536 @@ -866,15 +997,18 @@ examples: - "8123" database: - description: "Name of the database." + description: "The name of the database." + title: "Database" type: "string" examples: - "default" username: - description: "Username to use to access the database." + description: "The username which is used to access the database." + title: "Username" type: "string" password: - description: "Password associated with the username." + description: "The password associated with this username." 
+ title: "Password" type: "string" airbyte_secret: true ssl: @@ -1173,7 +1307,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-drift:0.2.4" +- dockerImage: "airbyte/source-drift:0.2.5" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/drift" connectionSpecification: @@ -1220,7 +1354,7 @@ refresh_token: type: "string" title: "Refresh Token" - description: "Refresh Token to renew the expired access_token." + description: "Refresh Token to renew the expired Access Token." default: "" airbyte_secret: true - title: "Access Token" @@ -1294,7 +1428,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-facebook-marketing:0.2.26" +- dockerImage: "airbyte/source-facebook-marketing:0.2.29" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/facebook-marketing" changelogUrl: "https://docs.airbyte.io/integrations/sources/facebook-marketing" @@ -1335,9 +1469,15 @@ - "2017-01-26T00:00:00Z" type: "string" format: "date-time" + fetch_thumbnail_images: + title: "Fetch Thumbnail Images" + description: "In each Ad Creative, fetch the thumbnail_url and store the\ + \ result in thumbnail_data_url" + default: false + type: "boolean" include_deleted: title: "Include Deleted" - description: "Include data from deleted campaigns, ads, and adsets." + description: "Include data from deleted campaigns, ads, and adsets" default: false type: "boolean" insights_lookback_window: @@ -1349,8 +1489,8 @@ type: "integer" insights_days_per_job: title: "Insights Days Per Job" - description: "Number of days to sync in one job. The more data you have\ - \ - the smaller you want this parameter to be." + description: "Number of days to sync in one job (the more data you have,\ + \ the smaller this parameter should be)" default: 7 minimum: 1 maximum: 30 @@ -2077,7 +2217,7 @@ oauthFlowOutputParameters: - - "access_token" - - "refresh_token" -- dockerImage: "airbyte/source-google-analytics-v4:0.1.13" +- dockerImage: "airbyte/source-google-analytics-v4:0.1.14" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/google-analytics-v4" connectionSpecification: @@ -2090,26 +2230,30 @@ additionalProperties: true properties: view_id: + order: 2 type: "string" title: "View ID" description: "The ID for the Google Analytics View you want to fetch data\ \ from. This can be found from the Google Analytics Account Explorer." - airbyte_secret: true start_date: + order: 1 type: "string" title: "Start Date" description: "A date in the format YYYY-MM-DD." examples: - "2020-06-01" custom_reports: - title: "Custom Reports" + order: 3 type: "string" + title: "Custom Reports (optional)" description: "A JSON array describing the custom reports you want to sync\ \ from GA. Check out the docs to get more information about this field." credentials: + order: 0 type: "object" + title: "Credentials" oneOf: - title: "Authenticate via Google (Oauth)" type: "object" @@ -2519,7 +2663,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-harvest:0.1.7" +- dockerImage: "airbyte/source-harvest:0.1.8" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/harvest" connectionSpecification: @@ -2534,12 +2678,12 @@ account_id: title: "Account ID" description: "Harvest account ID. 
Required for all Harvest requests in pair\ - \ with API Key" + \ with Personal Access Token" airbyte_secret: true type: "string" order: 0 replication_start_date: - title: "Replication Start Date" + title: "Start Date" description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ \ data before this date will not be replicated." pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" @@ -2549,12 +2693,12 @@ order: 1 credentials: title: "Authentication mechanism" - description: "Choose how to authenticate to Harvest" + description: "Choose how to authenticate to Harvest." type: "object" order: 2 oneOf: - type: "object" - title: "Authenticate via Harvest (Oauth)" + title: "Authenticate via Harvest (OAuth)" required: - "client_id" - "client_secret" @@ -2571,17 +2715,16 @@ client_id: title: "Client ID" type: "string" - description: "The Client ID of your application" + description: "The Client ID of your Harvest developer application." client_secret: title: "Client Secret" type: "string" - description: "The client secret of your application" + description: "The Client Secret of your Harvest developer application." airbyte_secret: true refresh_token: title: "Refresh Token" type: "string" - description: "A refresh token generated using the above client ID\ - \ and secret" + description: "Refresh Token to renew the expired Access Token." airbyte_secret: true - type: "object" title: "Authenticate with Personal Access Token" @@ -2656,7 +2799,7 @@ path_in_connector_config: - "credentials" - "client_secret" -- dockerImage: "airbyte/source-hubspot:0.1.26" +- dockerImage: "airbyte/source-hubspot:0.1.29" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/hubspot" connectionSpecification: @@ -2666,11 +2809,11 @@ required: - "start_date" - "credentials" - additionalProperties: false + additionalProperties: true properties: start_date: type: "string" - title: "Replication start date" + title: "Start Date" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ \ data before this date will not be replicated." @@ -2678,11 +2821,11 @@ - "2017-01-25T00:00:00Z" credentials: title: "Authentication mechanism" - description: "Choose either to provide the API key or the OAuth2.0 credentials" + description: "Choose how to authenticate to HubSpot." type: "object" oneOf: - type: "object" - title: "Authenticate via HubSpot (Oauth)" + title: "Authenticate via HubSpot (OAuth)" required: - "client_id" - "client_secret" @@ -2691,7 +2834,7 @@ properties: credentials_title: type: "string" - title: "Credentials title" + title: "Credentials Title" description: "Name of the credentials set" const: "OAuth Credentials" enum: @@ -2700,22 +2843,25 @@ order: 0 client_id: title: "Client ID" - description: "HubSpot client_id. See our docs if you need help finding this id." type: "string" examples: - "123456789000" client_secret: title: "Client Secret" - description: "HubSpot client_secret. See our docs if you need help finding this secret." type: "string" examples: - "secret" airbyte_secret: true refresh_token: - title: "Refresh token" - description: "HubSpot refresh_token. See our docs if you need help generating the token." 
type: "string" examples: @@ -2887,7 +3033,7 @@ oauthFlowInitParameters: [] oauthFlowOutputParameters: - - "access_token" -- dockerImage: "airbyte/source-intercom:0.1.11" +- dockerImage: "airbyte/source-intercom:0.1.12" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/intercom" connectionSpecification: @@ -2901,18 +3047,18 @@ properties: start_date: type: "string" - description: "The date from which you'd like to replicate data for Intercom\ - \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ - \ date will be replicated." + title: "Start Date" + description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." examples: - "2020-11-16T00:00:00Z" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" access_token: title: "Access Token" type: "string" - description: "Access token generated either from an oauth flow or from the\ - \ Intercom Developer dashboard. See the docs for more information on how to obtain this key manually." + description: "Access Token for making authenticated requests. See the docs\ + \ for more information on how to obtain this key manually." airbyte_secret: true supportsNormalization: false supportsDBT: false @@ -3299,6 +3445,24 @@ supportsDBT: false supported_destination_sync_modes: - "append" +- dockerImage: "airbyte/source-lemlist:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/source-lemlist" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Lemlist Spec" + type: "object" + required: + - "api_key" + additionalProperties: false + properties: + api_key: + type: "string" + description: "API key to access your lemlist account." + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-lever-hiring:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/lever-hiring" @@ -3436,9 +3600,9 @@ - - "client_secret" oauthFlowOutputParameters: - - "refresh_token" -- dockerImage: "airbyte/source-linnworks:0.1.3" +- dockerImage: "airbyte/source-linnworks:0.1.4" spec: - documentationUrl: "https://docsurl.com" + documentationUrl: "https://docs.airbyte.io/integrations/sources/linnworks" connectionSpecification: $schema: "http://json-schema.org/draft-07/schema#" title: "Linnworks Spec" @@ -3816,7 +3980,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-microsoft-teams:0.2.4" +- dockerImage: "airbyte/source-microsoft-teams:0.2.5" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/microsoft-teams" connectionSpecification: @@ -3829,6 +3993,7 @@ properties: period: type: "string" + title: "Period" description: "Specifies the length of time over which the Team Device Report\ \ stream is aggregated. The supported values are: D7, D30, D90, and D180." examples: @@ -3857,21 +4022,26 @@ tenant_id: title: "Directory (tenant) ID" type: "string" - description: "Directory (tenant) ID" + description: "A globally unique identifier (GUID) that is different\ + \ than your organization name or domain. 
Follow these steps to obtain:\ \ open one of the Teams where you belong inside the Teams Application\ \ -> Click on the … next to the Team title -> Click on Get link\ \ to team -> Copy the link to the team and grab the tenant ID from\ \ the URL" client_id: - title: "Application (client) ID" + title: "Client ID" type: "string" - description: "Application (client) ID" + description: "The Client ID of your Microsoft Teams developer application." client_secret: title: "Client Secret" type: "string" - description: "Client secret" + description: "The Client Secret of your Microsoft Teams developer\ + \ application." airbyte_secret: true refresh_token: title: "Refresh Token" type: "string" - description: "A refresh token generated using the above client ID\ - \ and secret" + description: "A Refresh Token to renew the expired Access Token." airbyte_secret: true - type: "object" title: "Authenticate via Microsoft" @@ -3891,15 +4061,21 @@ tenant_id: title: "Directory (tenant) ID" type: "string" - description: "Directory (tenant) ID" + description: "A globally unique identifier (GUID) that is different\ + \ than your organization name or domain. Follow these steps to obtain:\ + \ open one of the Teams where you belong inside the Teams Application\ + \ -> Click on the … next to the Team title -> Click on Get link\ + \ to team -> Copy the link to the team and grab the tenant ID from\ + \ the URL" client_id: - title: "Application (client) ID" + title: "Client ID" type: "string" - description: "Application (client) ID" + description: "The Client ID of your Microsoft Teams developer application." client_secret: title: "Client Secret" type: "string" - description: "Client secret" + description: "The Client Secret of your Microsoft Teams developer\ + \ application." airbyte_secret: true supportsNormalization: false supportsDBT: false @@ -4011,9 +4187,9 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-monday:0.1.1" +- dockerImage: "airbyte/source-monday:0.1.2" spec: - documentationUrl: "https://docsurl.com" + documentationUrl: "https://docs.airbyte.io/integrations/sources/monday" connectionSpecification: $schema: "http://json-schema.org/draft-07/schema#" title: "Monday Spec" type: "object" required: @@ -4024,8 +4200,8 @@ properties: api_token: type: "string" - description: "This is the API token to authenticate requests to Monday.\ - \ Profile picture (bottom left) => Admin => API" + title: "Personal Access Token" + description: "Access Token for making authenticated requests."
@@ -4419,6 +4595,124 @@
  supportsNormalization: false
  supportsDBT: false
  supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-openweather:0.1.0"
+  spec:
+    documentationUrl: "https://docsurl.com"
+    connectionSpecification:
+      $schema: "http://json-schema.org/draft-07/schema#"
+      title: "Open Weather Spec"
+      type: "object"
+      required:
+        - "appid"
+        - "lat"
+        - "lon"
+      additionalProperties: false
+      properties:
+        lat:
+          title: "Latitude"
+          type: "string"
+          pattern: "^[-]?\\d{1,2}(\\.\\d+)?$"
+          examples:
+            - "45.7603"
+            - "-21.249107858038816"
+          description: "Latitude for which you want to get the weather conditions\
+            \ (min -90, max 90)."
+        lon:
+          title: "Longitude"
+          type: "string"
+          pattern: "^[-]?\\d{1,3}(\\.\\d+)?$"
+          examples:
+            - "4.835659"
+            - "-70.39482074115321"
+          description: "Longitude for which you want to get the weather conditions\
+            \ (min -180, max 180)."
+        appid:
+          title: "App ID"
+          type: "string"
+          description: "Your OpenWeather API Key. See here. The key is case sensitive."
+          airbyte_secret: true
+        units:
+          title: "Units"
+          type: "string"
+          description: "Units of measurement. standard, metric and imperial units\
+            \ are available. If you do not use the units parameter, standard units\
+            \ will be applied by default."
+          enum:
+            - "standard"
+            - "metric"
+            - "imperial"
+          examples:
+            - "standard"
+            - "metric"
+            - "imperial"
+        lang:
+          title: "Language"
+          type: "string"
+          description: "You can use lang parameter to get the output in your language.\
+            \ The contents of the description field will be translated. See here for the list\
+            \ of supported languages."
+          enum:
+            - "af"
+            - "al"
+            - "ar"
+            - "az"
+            - "bg"
+            - "ca"
+            - "cz"
+            - "da"
+            - "de"
+            - "el"
+            - "en"
+            - "eu"
+            - "fa"
+            - "fi"
+            - "fr"
+            - "gl"
+            - "he"
+            - "hi"
+            - "hr"
+            - "hu"
+            - "id"
+            - "it"
+            - "ja"
+            - "kr"
+            - "la"
+            - "lt"
+            - "mk"
+            - "no"
+            - "nl"
+            - "pl"
+            - "pt"
+            - "pt_br"
+            - "ro"
+            - "ru"
+            - "sv"
+            - "se"
+            - "sk"
+            - "sl"
+            - "sp"
+            - "es"
+            - "sr"
+            - "th"
+            - "tr"
+            - "ua"
+            - "uk"
+            - "vi"
+            - "zh_cn"
+            - "zh_tw"
+            - "zu"
+          examples:
+            - "en"
+            - "fr"
+            - "pt_br"
+            - "uk"
+            - "zh_cn"
+            - "zh_tw"
+  supportsNormalization: false
+  supportsDBT: false
+  supported_destination_sync_modes: []
- dockerImage: "airbyte/source-oracle:0.3.10"
  spec:
    documentationUrl: "https://docs.airbyte.io/integrations/sources/oracle"
    connectionSpecification:
@@ -4682,7 +4976,7 @@
  supportsNormalization: false
  supportsDBT: false
  supported_destination_sync_modes: []
-- dockerImage: "airbyte/source-paypal-transaction:0.1.2"
+- dockerImage: "airbyte/source-paypal-transaction:0.1.3"
  spec:
    documentationUrl: "https://docs.airbyte.io/integrations/sources/paypal-transactions"
    connectionSpecification:
@@ -5826,7 +6120,7 @@
  supportsNormalization: false
  supportsDBT: false
  supported_destination_sync_modes: []
-- dockerImage: "airbyte/source-shopify:0.1.25"
+- dockerImage: "airbyte/source-shopify:0.1.26"
  spec:
    documentationUrl: "https://docs.airbyte.io/integrations/sources/shopify"
    connectionSpecification:
@@ -5859,6 +6153,7 @@
            - "client_id"
            - "client_secret"
            - "access_token"
+            - "auth_method"
          properties:
            auth_method:
              type: "string"
@@ -5883,6 +6178,7 @@
          type: "object"
          required:
            - "api_password"
+            - "auth_method"
          properties:
            auth_method:
              type: "string"
@@ -6077,7 +6373,7 @@
  supportsNormalization: false
  supportsDBT: false
  supported_destination_sync_modes: []
-- dockerImage: "airbyte/source-snapchat-marketing:0.1.3"
+- dockerImage: "airbyte/source-snapchat-marketing:0.1.4"
  spec:
documentationUrl: "https://docs.airbyte.io/integrations/sources/snapchat-marketing" connectionSpecification: @@ -6093,24 +6389,23 @@ client_id: title: "Client ID" type: "string" - description: "The Snapchat Client ID for API credentials." + description: "The Client ID of your Snapchat developer application." airbyte_secret: true client_secret: title: "Client Secret" type: "string" - description: "The Client Secret for a given Client ID." + description: "The Client Secret of your Snapchat developer application." airbyte_secret: true refresh_token: - title: "API Refresh Token" + title: "Refresh Token" type: "string" - description: "Refresh Token to get next api key after expiration. Is given\ - \ with API Key" + description: "Refresh Token to renew the expired Access Token." airbyte_secret: true start_date: title: "Start Date" type: "string" - description: "The start date to sync data. Leave blank for full sync. Format:\ - \ YYYY-MM-DD." + description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." examples: - "2021-01-01" default: "1970-01-01" @@ -6242,7 +6537,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-strava:0.1.1" +- dockerImage: "airbyte/source-strava:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/strava" connectionSpecification: @@ -6257,6 +6552,12 @@ - "start_date" additionalProperties: false properties: + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + default: "Client" client_id: type: "string" description: "The Client ID of your Strava developer application." @@ -6296,6 +6597,40 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] + advanced_auth: + auth_flow_type: "oauth2.0" + predicate_key: + - "auth_type" + predicate_value: "Client" + oauth_config_specification: + complete_oauth_output_specification: + type: "object" + additionalProperties: false + properties: + refresh_token: + type: "string" + path_in_connector_config: + - "refresh_token" + complete_oauth_server_input_specification: + type: "object" + additionalProperties: false + properties: + client_id: + type: "string" + client_secret: + type: "string" + complete_oauth_server_output_specification: + type: "object" + additionalProperties: false + properties: + client_id: + type: "string" + path_in_connector_config: + - "client_id" + client_secret: + type: "string" + path_in_connector_config: + - "client_secret" - dockerImage: "airbyte/source-stripe:0.1.25" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/stripe" @@ -6755,7 +7090,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-zendesk-support:0.1.10" +- dockerImage: "airbyte/source-zendesk-support:0.1.9" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/zendesk-support" connectionSpecification: diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java index 93917d9752b9..71974b048d6b 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java @@ -58,9 +58,12 @@ public JsonNode 
maskSecrets(final JsonNode obj, final JsonNode schema) { if (copy.has(key)) { ((ObjectNode) copy).put(key, SECRETS_MASK); } + } else if (canBeProcessed(fieldSchema) && copy.has(key)) { + ((ObjectNode) copy).set(key, maskSecrets(copy.get(key), fieldSchema)); } final var combinationKey = findJsonCombinationNode(fieldSchema); + if (combinationKey.isPresent() && copy.has(key)) { var combinationCopy = copy.get(key); final var arrayNode = (ArrayNode) fieldSchema.get(combinationKey.get()); @@ -111,8 +114,9 @@ public JsonNode copySecrets(final JsonNode src, final JsonNode dst, final JsonNo // We only copy the original secret if the destination object isn't attempting to overwrite it // i.e: if the value of the secret isn't set to the mask if (isSecret(fieldSchema) && src.has(key)) { - if (dst.has(key) && dst.get(key).asText().equals(SECRETS_MASK)) + if (dst.has(key) && dst.get(key).asText().equals(SECRETS_MASK)) { dstCopy.set(key, src.get(key)); + } } final var combinationKey = findJsonCombinationNode(fieldSchema); diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessorTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessorTest.java index 7678024f8c7f..d429ba4be638 100644 --- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessorTest.java +++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessorTest.java @@ -4,7 +4,7 @@ package io.airbyte.config.persistence.split_secrets; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; @@ -153,8 +153,98 @@ public class JsonSecretsProcessorTest { + " }\n" + " }"); + private final static String test = """ + { + "provider": { + "bucket": "bucket", + "endpoint": "", + "path_prefix": "", + "aws_access_key_id": "nothingtosee", + "aws_secret_access_key": "same" + } + } + """; + + private final static String testSpecs = + """ + { + "type": "object", + "title": "S3 Source Spec", + "required": [ + "dataset", + "path_pattern", + "provider" + ], + "properties": { + "provider": { + "type": "object", + "title": "S3: Amazon Web Services", + "required": [ + "bucket" + ], + "properties": { + "bucket": { + "type": "string", + "title": "Bucket", + "description": "Name of the S3 bucket where the file(s) exist." + }, + "use_ssl": { + "type": "boolean", + "title": "Use Ssl", + "description": "Is remote server using secure SSL/TLS connection" + }, + "endpoint": { + "type": "string", + "title": "Endpoint", + "default": "", + "description": "Endpoint to an S3 compatible service. Leave empty to use AWS." + }, + "path_prefix": { + "type": "string", + "title": "Path Prefix", + "default": "", + "description": "By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimise finding these in S3. This is optional but recommended if your bucket contains many folders/files." + }, + "verify_ssl_cert": { + "type": "boolean", + "title": "Verify Ssl Cert", + "description": "Allow self signed certificates" + }, + "aws_access_key_id": { + "type": "string", + "title": "Aws Access Key Id", + "description": "In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. 
If accessing publicly available data, this field is not necessary.", + "airbyte_secret": true + }, + "aws_secret_access_key": { + "type": "string", + "title": "Aws Secret Access Key", + "description": "In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.", + "airbyte_secret": true + } + } + } + } + } + """; + JsonSecretsProcessor processor = new JsonSecretsProcessor(); + @Test + public void testNestedSecrets() { + final JsonNode obj = Jsons.deserialize(test); + final JsonNode specObj = Jsons.deserialize(testSpecs); + final JsonNode sanitized = processor.maskSecrets(obj, specObj); + + final JsonNode expected = Jsons.jsonNode(ImmutableMap.builder() + .put("bucket", "bucket") + .put("endpoint", "") + .put("path_prefix", "") + .put("aws_access_key_id", JsonSecretsProcessor.SECRETS_MASK) + .put("aws_secret_access_key", JsonSecretsProcessor.SECRETS_MASK).build()); + assertEquals(expected, sanitized.get("provider")); + } + @Test public void testMaskSecrets() { final JsonNode obj = Jsons.jsonNode(ImmutableMap.builder() diff --git a/airbyte-e2e-testing/cypress/integration/destination.spec.js b/airbyte-e2e-testing/cypress/integration/destination.spec.js index bcbb32c9d469..8d64a08263d8 100644 --- a/airbyte-e2e-testing/cypress/integration/destination.spec.js +++ b/airbyte-e2e-testing/cypress/integration/destination.spec.js @@ -2,7 +2,7 @@ describe("Destination main actions", () => { it("Create new destination", () => { cy.createTestDestination("Test destination cypress"); - cy.url().should("include", `${Cypress.config().baseUrl}/destination/`); + cy.url().should("include", `/destination/`); }); it("Update destination", () => { diff --git a/airbyte-e2e-testing/cypress/integration/onboarding.spec.js b/airbyte-e2e-testing/cypress/integration/onboarding.spec.js index e693f5b58293..572d4ea99e36 100644 --- a/airbyte-e2e-testing/cypress/integration/onboarding.spec.js +++ b/airbyte-e2e-testing/cypress/integration/onboarding.spec.js @@ -1,13 +1,13 @@ describe("Preferences actions", () => { it("Should redirect to onboarding after email is entered", () => { - cy.visit("/"); - cy.url().should("include", `${Cypress.config().baseUrl}/preferences`); + cy.visit("/preferences"); + cy.url().should("include", `/preferences`); cy.fillEmail("test-email-onboarding@test-onboarding-domain.com"); cy.get("input[name=securityUpdates]").parent().click(); cy.submit(); - cy.url().should("equal", `${Cypress.config().baseUrl}/onboarding`); + cy.url().should("match", /.*\/onboarding/); }); }); diff --git a/airbyte-e2e-testing/cypress/integration/source.spec.js b/airbyte-e2e-testing/cypress/integration/source.spec.js index 29fdf5ee9ae6..581251d4445c 100644 --- a/airbyte-e2e-testing/cypress/integration/source.spec.js +++ b/airbyte-e2e-testing/cypress/integration/source.spec.js @@ -2,7 +2,7 @@ describe("Source main actions", () => { it("Create new source", () => { cy.createTestSource("Test source cypress"); - cy.url().should("include", `${Cypress.config().baseUrl}/source/`); + cy.url().should("include", `/source/`); }); //TODO: add update source on some other connector or create 1 more user for pg diff --git a/airbyte-e2e-testing/cypress/support/commands/common.js b/airbyte-e2e-testing/cypress/support/commands/common.js index 2e134e1b566f..caeb91aff28d 100644 --- a/airbyte-e2e-testing/cypress/support/commands/common.js +++ b/airbyte-e2e-testing/cypress/support/commands/common.js @@ -33,13 +33,13 @@ 
Cypress.Commands.add("openDestinationPage", () => { Cypress.Commands.add("openNewSourceForm", () => { cy.openSourcePage(); cy.get("button[data-id='new-source'").click(); - cy.url().should("eq", `${Cypress.config().baseUrl}/source/new-source`); + cy.url().should("include", `/source/new-source`); }) Cypress.Commands.add("openNewDestinationForm", () => { cy.openDestinationPage(); cy.get("button[data-id='new-destination'").click(); - cy.url().should("eq", `${Cypress.config().baseUrl}/destination/new-destination`); + cy.url().should("include", `/destination/new-destination`); }) Cypress.Commands.add("updateField", (field, value) => { diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java index 71bd8f110ef2..9a98616a3396 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java @@ -146,7 +146,7 @@ protected void acceptTracked(final AirbyteMessage message) throws Exception { // TODO use a more efficient way to compute bytes that doesn't require double serialization (records // are serialized again when writing to // the destination - long messageSizeInBytes = ByteUtils.getSizeInBytes(Jsons.serialize(recordMessage.getData())); + final long messageSizeInBytes = ByteUtils.getSizeInBytes(Jsons.serialize(recordMessage.getData())); if (bufferSizeInBytes + messageSizeInBytes >= maxQueueSizeInBytes) { flushQueueToDestination(); bufferSizeInBytes = 0; @@ -202,7 +202,7 @@ protected void close(final boolean hasFailed) throws Exception { } try { - // if no state was was emitted (i.e. full refresh), if there were still no failures, then we can + // if no state was emitted (i.e. full refresh), if there were still no failures, then we can // still succeed. if (lastFlushedState == null) { onClose.accept(hasFailed); @@ -211,7 +211,7 @@ protected void close(final boolean hasFailed) throws Exception { onClose.accept(false); } - // if one close succeeds without exception then we can emit the state record because it means its + // if onClose succeeds without exception then we can emit the state record because it means its // records were not only flushed, but committed. if (lastFlushedState != null) { outputRecordCollector.accept(lastFlushedState); diff --git a/airbyte-integrations/bases/base-standard-source-test-file/Dockerfile b/airbyte-integrations/bases/base-standard-source-test-file/Dockerfile index 4a10b77ce6ad..437c990d350f 100644 --- a/airbyte-integrations/bases/base-standard-source-test-file/Dockerfile +++ b/airbyte-integrations/bases/base-standard-source-test-file/Dockerfile @@ -21,9 +21,7 @@ ENV APPLICATION base-standard-source-test-file WORKDIR /app COPY entrypoint.sh . 
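# (Docker's ADD instruction auto-extracts a local tar archive into the destination directory,
# so the COPY-then-untar pair below collapses into the single ADD line that replaces it; the
# same substitution repeats across the Dockerfiles in this patch.)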
-COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /app ENTRYPOINT ["/app/entrypoint.sh"] diff --git a/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md b/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md index 3b47b7a2b4f0..a0fc565f73c9 100644 --- a/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md +++ b/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.1.36 +Add assert that spec.json file does not have any `$ref` in it: [#8842](https://github.com/airbytehq/airbyte/pull/8842) + ## 0.1.32 Add info about skipped failed tests in /test command message on GitHub: [#8691](https://github.com/airbytehq/airbyte/pull/8691) diff --git a/airbyte-integrations/bases/source-acceptance-test/Dockerfile b/airbyte-integrations/bases/source-acceptance-test/Dockerfile index 4667d92e1dac..572f770bf6dd 100644 --- a/airbyte-integrations/bases/source-acceptance-test/Dockerfile +++ b/airbyte-integrations/bases/source-acceptance-test/Dockerfile @@ -33,7 +33,7 @@ COPY pytest.ini setup.py ./ COPY source_acceptance_test ./source_acceptance_test RUN pip install . -LABEL io.airbyte.version=0.1.34 +LABEL io.airbyte.version=0.1.38 LABEL io.airbyte.name=airbyte/source-acceptance-test ENTRYPOINT ["python", "-m", "pytest", "-p", "source_acceptance_test.plugin", "-r", "fEsx"] diff --git a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py index dbaaecc43b9e..adffd750e0ea 100644 --- a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py +++ b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py @@ -2,6 +2,7 @@ # Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
 #
+import json
 import logging
 import re
 from collections import Counter, defaultdict
@@ -41,6 +42,10 @@ def actual_connector_spec_fixture(request: BaseTest, docker_runner):
         request.spec_cache = spec
         return request.spec_cache
 
+    @pytest.fixture(name="connector_spec_dict")
+    def connector_spec_dict_fixture(request: BaseTest, actual_connector_spec):
+        return json.loads(actual_connector_spec.json())
+
     def test_match_expected(
         self, connector_spec: ConnectorSpecification, actual_connector_spec: ConnectorSpecification, connector_config: SecretDict
     ):
@@ -69,6 +74,12 @@ def test_has_secret(self):
     def test_secret_never_in_the_output(self):
         """This test should be injected into any docker command it needs to know current config and spec"""
 
+    def test_defined_refs_exist_in_json_spec_file(self, connector_spec_dict: dict):
+        """Check for the presence of unresolved `$ref` values within each JSON spec file"""
+        check_result = find_key_inside_schema(schema_item=connector_spec_dict)
+
+        assert not check_result, "Found unresolved `$ref` values in spec.json file"
+
     def test_oauth_flow_parameters(self, actual_connector_spec: ConnectorSpecification):
         """
         Check if connector has correct oauth flow parameters according to https://docs.airbyte.io/connector-development/connector-specification-reference
diff --git a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/utils/connector_runner.py b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/utils/connector_runner.py
index 28afee88f54e..3d88cd59148c 100644
--- a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/utils/connector_runner.py
+++ b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/utils/connector_runner.py
@@ -115,24 +115,35 @@ def run(self, cmd, config=None, state=None, catalog=None, **kwargs) -> Iterable[
     def read(cls, container: Container, command: str = None, with_ext: bool = True) -> Iterable[str]:
         """Reads connector's logs per line"""
         buffer = b""
-        has_exception = False
+        exception = ""
+        line = ""
         for chunk in container.logs(stdout=True, stderr=True, stream=True, follow=True):
+
             buffer += chunk
-            found = buffer.find(b"\n")
-            if found <= -1:
-                continue
-            line = buffer[:found].decode("utf-8")
-            if has_exception or "Traceback (most recent call last)" in line:
-                has_exception = True
+            while True:
+                # every chunk can include several lines
+                found = buffer.find(b"\n")
+                if found <= -1:
+                    break
+
+                line = buffer[: found + 1].decode("utf-8")
+                if len(exception) > 0 or line.startswith("Traceback (most recent call last)"):
+                    exception += line
+                else:
+                    yield line
+                buffer = buffer[found + 1 :]
+
+        if buffer:
+            # send the latest chunk if exists
+            line = buffer.decode("utf-8")
+            if exception:
+                exception += line
             else:
                 yield line
-            buffer = buffer[found + 1 :]
-        if not has_exception and buffer:
-            yield buffer.decode("utf-8")
 
         exit_status = container.wait()
         if exit_status["StatusCode"]:
-            error = buffer.decode("utf-8") if has_exception else exit_status["Error"]
+            error = exit_status["Error"] or exception or line
             logging.error(f"Docker container failed, " f'code {exit_status["StatusCode"]}, error:\n{error}')
             if with_ext:
                 raise ContainerError(
diff --git a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_core.py b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_core.py
index 81019080592b..becae418a437 100644
--- a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_core.py
+++
b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_core.py @@ -20,6 +20,127 @@ from source_acceptance_test.tests.test_core import TestSpec as _TestSpec +@pytest.mark.parametrize( + "connector_spec, should_fail", + [ + ( + { + "connectionSpecification": { + "type": "object", + "properties": { + "client_id": {"type": "string"}, + "client_secret": {"type": "string"}, + "access_token": {"type": "string"}, + "refresh_token": {"type": "string"}, + "$ref": None, + }, + } + }, + True, + ), + ( + { + "advanced_auth": { + "auth_flow_type": "oauth2.0", + "predicate_key": ["credentials", "auth_type"], + "predicate_value": "Client", + "oauth_config_specification": { + "complete_oauth_output_specification": { + "type": "object", + "properties": {"refresh_token": {"type": "string"}, "$ref": None}, + } + }, + } + }, + True, + ), + ( + { + "advanced_auth": { + "auth_flow_type": "oauth2.0", + "predicate_key": ["credentials", "auth_type"], + "predicate_value": "Client", + "oauth_config_specification": { + "complete_oauth_server_input_specification": { + "type": "object", + "properties": {"refresh_token": {"type": "string"}, "$ref": None}, + } + }, + } + }, + True, + ), + ( + { + "advanced_auth": { + "auth_flow_type": "oauth2.0", + "predicate_key": ["credentials", "auth_type"], + "predicate_value": "Client", + "oauth_config_specification": { + "complete_oauth_server_output_specification": { + "type": "object", + "properties": {"refresh_token": {"type": "string"}, "$ref": None}, + } + }, + } + }, + True, + ), + ( + { + "connectionSpecification": { + "type": "object", + "properties": { + "client_id": {"type": "string"}, + "client_secret": {"type": "string"}, + "access_token": {"type": "string"}, + "refresh_token": {"type": "string"}, + }, + } + }, + False, + ), + ( + { + "connectionSpecification": { + "type": "object", + "properties": { + "client_id": {"type": "string"}, + "client_secret": {"type": "string"}, + "access_token": {"type": "string"}, + "refresh_token": {"type": "string"}, + }, + }, + "advanced_auth": { + "auth_flow_type": "oauth2.0", + "predicate_key": ["credentials", "auth_type"], + "predicate_value": "Client", + "oauth_config_specification": { + "complete_oauth_server_output_specification": { + "type": "object", + "properties": {"refresh_token": {"type": "string"}}, + } + }, + }, + }, + False, + ), + ({"$ref": None}, True), + ({"properties": {"user": {"$ref": None}}}, True), + ({"properties": {"user": {"$ref": "user.json"}}}, True), + ({"properties": {"user": {"type": "object", "properties": {"username": {"type": "string"}}}}}, False), + ({"properties": {"fake_items": {"type": "array", "items": {"$ref": "fake_item.json"}}}}, True), + ], +) +def test_ref_in_spec_schemas(connector_spec, should_fail): + t = _TestSpec() + if should_fail is True: + with pytest.raises(AssertionError): + t.test_defined_refs_exist_in_json_spec_file(connector_spec_dict=connector_spec) + else: + t.test_defined_refs_exist_in_json_spec_file(connector_spec_dict=connector_spec) + + @pytest.mark.parametrize( "schema, cursors, should_fail", [ diff --git a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_utils.py b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_utils.py index 50320605a7a1..f116a29bb51c 100644 --- a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_utils.py +++ b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_utils.py @@ -2,10 +2,16 @@ # Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
 #
+import random
+import string
 from functools import partial
+from typing import Iterable
+from unittest.mock import Mock
 
 import pytest
+from docker.errors import ContainerError
 from source_acceptance_test.utils.compare import make_hashable
+from source_acceptance_test.utils.connector_runner import ConnectorRunner
 
 
 def not_sorted_data():
@@ -164,3 +170,82 @@ def test_exclude_fields():
     output = map(serializer, data)
     for item in output:
         assert "organization_id" not in item
+
+
+class MockContainer:
+    def __init__(self, status: dict, iter_logs: Iterable):
+        self.wait = Mock(return_value=status)
+        self.logs = Mock(return_value=iter(iter_logs))
+
+        class Image:
+            pass
+
+        self.image = Image()
+
+
+def binary_generator(lengths, last_line=None):
+    data = ""
+    for length in lengths:
+        data += "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(length)) + "\n"
+    data = data.encode()
+    chunk_size = random.randint(512, 1024)
+
+    while len(data) > chunk_size:
+        yield data[:chunk_size]
+        data = data[chunk_size:]
+    yield data
+    if last_line:
+        yield ("bla-1234567890-bla\n" + last_line).encode()
+
+
+def test_successful_logs_reading():
+    line_count = 1234
+    line_lengths = [random.randint(0, 1024 * 20) for _ in range(line_count)]
+    lines = [
+        line for line in ConnectorRunner.read(container=MockContainer(status={"StatusCode": 0}, iter_logs=binary_generator(line_lengths)))
+    ]
+    assert line_count == len(lines)
+    for line, length in zip(lines, line_lengths):
+        assert len(line) - 1 == length
+
+
+@pytest.mark.parametrize(
+    "traceback,container_error,last_line,expected_error",
+    (
+        # container returns some internal error
+        (
+            "Traceback (most recent call last):\n File \"\", line 1, in \nKeyError: 'bbbb'",
+            "Some Container Error",
+            "Last Container Logs Line",
+            "Some Container Error",
+        ),
+        # container returns a raw traceback
+        (
+            "Traceback (most recent call last):\n File \"\", line 1, in \nKeyError: 'bbbb'",
+            None,
+            "Last Container Logs Line",
+            "Traceback (most recent call last):\n File \"\", line 1, in \nKeyError: 'bbbb'",
+        ),
+        # container doesn't return any tracebacks or errors
+        (
+            None,
+            None,
+            "Last Container Logs Line",
+            "Last Container Logs Line",
+        ),
+    ),
+)
+def test_failed_reading(traceback, container_error, last_line, expected_error):
+    line_count = 10
+    line_lengths = [random.randint(0, 523) for _ in range(line_count)]
+
+    with pytest.raises(ContainerError) as exc:
+        list(
+            ConnectorRunner.read(
+                container=MockContainer(
+                    status={"StatusCode": 1, "Error": container_error}, iter_logs=binary_generator(line_lengths, traceback or last_line)
+                )
+            )
+        )
+
+    assert expected_error == exc.value.stderr
diff --git a/airbyte-integrations/bases/standard-source-test/Dockerfile b/airbyte-integrations/bases/standard-source-test/Dockerfile
index e2dbe3dd47e1..fe3264804578 100644
--- a/airbyte-integrations/bases/standard-source-test/Dockerfile
+++ b/airbyte-integrations/bases/standard-source-test/Dockerfile
@@ -21,9 +21,7 @@ ENV APPLICATION standard-source-test
 WORKDIR /app
 
 COPY entrypoint.sh .
-COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /app ENTRYPOINT ["/app/entrypoint.sh"] diff --git a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java index 760901e4f644..f4a2f53a068c 100644 --- a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java +++ b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java @@ -221,8 +221,8 @@ private static Map prepareResourceRequestMapBySystemProperties() if (memoryLimit.isBlank() || memoryLimit.isEmpty()) { memoryLimit = WorkerUtils.DEFAULT_RESOURCE_REQUIREMENTS.getMemoryLimit(); } - LOGGER.error("cpu limit -->> {}", cpuLimit); - LOGGER.error("memory limit -->> {}", memoryLimit); + LOGGER.info("Container CPU Limit = {}", cpuLimit); + LOGGER.info("Container Memory Limit = {}", memoryLimit); Map result = new HashMap<>(); result.put(CPU_REQUEST_FIELD_NAME, WorkerUtils.DEFAULT_RESOURCE_REQUIREMENTS.getCpuRequest()); result.put(CPU_LIMIT_FIELD_NAME, cpuLimit); diff --git a/airbyte-integrations/builds.md b/airbyte-integrations/builds.md index f5f82699baaa..e204cf7fb5ac 100644 --- a/airbyte-integrations/builds.md +++ b/airbyte-integrations/builds.md @@ -62,6 +62,7 @@ | MySQL | [![source-mysql](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-mysql%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-mysql) | | Notion | [![source-notion](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-notion%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-notion) | | OneSignal | [![source-onesignal](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-onesignal%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-onesignal) | +| OpenWeather | [![source-openweather](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-openweather%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-openweather) | | Oracle DB | [![source-oracle](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-oracle%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-oracle) | | Paypal Transaction | [![paypal-transaction](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-paypal-transaction%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-paypal-transaction) | | Paystack | [![source-paystack](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-paystack%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-paystack) | diff --git a/airbyte-integrations/connector-templates/destination-java/Dockerfile.hbs b/airbyte-integrations/connector-templates/destination-java/Dockerfile.hbs index 5e0ba28ec0da..0217e6d58c7e 100644 --- 
a/airbyte-integrations/connector-templates/destination-java/Dockerfile.hbs +++ b/airbyte-integrations/connector-templates/destination-java/Dockerfile.hbs @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-{{dashCase name}} -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.0 LABEL io.airbyte.name=airbyte/destination-{{dashCase name}} diff --git a/airbyte-integrations/connector-templates/source-java-jdbc/Dockerfile b/airbyte-integrations/connector-templates/source-java-jdbc/Dockerfile index 88d8a0591c53..bc882c7532d2 100644 --- a/airbyte-integrations/connector-templates/source-java-jdbc/Dockerfile +++ b/airbyte-integrations/connector-templates/source-java-jdbc/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-{{dashCase name}} -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte # Airbyte's build system uses these labels to know what to name and tag the docker images produced by this Dockerfile. LABEL io.airbyte.version=0.1.0 diff --git a/airbyte-integrations/connectors/destination-azure-blob-storage/Dockerfile b/airbyte-integrations/connectors/destination-azure-blob-storage/Dockerfile index 618eb625c9ef..76a71080c3e9 100644 --- a/airbyte-integrations/connectors/destination-azure-blob-storage/Dockerfile +++ b/airbyte-integrations/connectors/destination-azure-blob-storage/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-azure-blob-storage -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.0 LABEL io.airbyte.name=airbyte/destination-azure-blob-storage diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile index fd3d2e2e268e..78885b5710cb 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-bigquery-denormalized -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar +ADD build/distributions/${APPLICATION}*.tar /airbyte -RUN tar xf ${APPLICATION}.tar --strip-components=1 - -LABEL io.airbyte.version=0.1.10 +LABEL io.airbyte.version=0.1.11 LABEL io.airbyte.name=airbyte/destination-bigquery-denormalized diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/main/resources/spec.json index 8f060f7f20bc..7e9b1a8f2393 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/main/resources/spec.json @@ -35,27 +35,29 @@ "asia-northeast2", "asia-northeast3", "asia-south1", + "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", - "europe-central1", + "australia-southeast2", "europe-central2", "europe-north1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", - "europe-west5", 
"europe-west6", "northamerica-northeast1", + "northamerica-northeast2", "southamerica-east1", + "southamerica-west1", "us-central1", "us-east1", "us-east4", - "us-west-1", - "us-west-2", - "us-west-3", - "us-west-4" + "us-west1", + "us-west2", + "us-west3", + "us-west4" ] }, "credentials_json": { diff --git a/airbyte-integrations/connectors/destination-bigquery/Dockerfile b/airbyte-integrations/connectors/destination-bigquery/Dockerfile index d507eed69ceb..2340242c9d75 100644 --- a/airbyte-integrations/connectors/destination-bigquery/Dockerfile +++ b/airbyte-integrations/connectors/destination-bigquery/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-bigquery -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar +ADD build/distributions/${APPLICATION}*.tar /airbyte -RUN tar xf ${APPLICATION}.tar --strip-components=1 - -LABEL io.airbyte.version=0.5.0 +LABEL io.airbyte.version=0.5.1 LABEL io.airbyte.name=airbyte/destination-bigquery diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json index ea0a674dbe90..8b0f34047561 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/resources/spec.json @@ -44,27 +44,29 @@ "asia-northeast2", "asia-northeast3", "asia-south1", + "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", - "europe-central1", + "australia-southeast2", "europe-central2", "europe-north1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", - "europe-west5", "europe-west6", "northamerica-northeast1", + "northamerica-northeast2", "southamerica-east1", + "southamerica-west1", "us-central1", "us-east1", "us-east4", - "us-west-1", - "us-west-2", - "us-west-3", - "us-west-4" + "us-west1", + "us-west2", + "us-west3", + "us-west4" ] }, "credentials_json": { diff --git a/airbyte-integrations/connectors/destination-cassandra/Dockerfile b/airbyte-integrations/connectors/destination-cassandra/Dockerfile index 197bb25ec9fc..30ccca948e53 100644 --- a/airbyte-integrations/connectors/destination-cassandra/Dockerfile +++ b/airbyte-integrations/connectors/destination-cassandra/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-cassandra -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.0 LABEL io.airbyte.name=airbyte/destination-cassandra diff --git a/airbyte-integrations/connectors/destination-csv/Dockerfile b/airbyte-integrations/connectors/destination-csv/Dockerfile index fdaacb200ddb..e9b6b5e3697c 100644 --- a/airbyte-integrations/connectors/destination-csv/Dockerfile +++ b/airbyte-integrations/connectors/destination-csv/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-csv -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.2.8 LABEL io.airbyte.name=airbyte/destination-csv diff --git a/airbyte-integrations/connectors/destination-databricks/Dockerfile b/airbyte-integrations/connectors/destination-databricks/Dockerfile index 1ef415915e48..573e195913d1 100644 --- 
a/airbyte-integrations/connectors/destination-databricks/Dockerfile +++ b/airbyte-integrations/connectors/destination-databricks/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-databricks -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/destination-databricks diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksStreamCopier.java b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksStreamCopier.java index 65276e07936d..03b440a766eb 100644 --- a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksStreamCopier.java +++ b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksStreamCopier.java @@ -10,6 +10,7 @@ import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.destination.jdbc.SqlOperations; import io.airbyte.integrations.destination.jdbc.copy.StreamCopier; +import io.airbyte.integrations.destination.jdbc.copy.s3.LegacyS3StreamCopier; import io.airbyte.integrations.destination.s3.S3DestinationConfig; import io.airbyte.integrations.destination.s3.parquet.S3ParquetFormatConfig; import io.airbyte.integrations.destination.s3.parquet.S3ParquetWriter; @@ -23,9 +24,8 @@ import org.slf4j.LoggerFactory; /** - * This implementation is similar to - * {@link io.airbyte.integrations.destination.jdbc.copy.s3.S3StreamCopier}. The difference is that - * this implementation creates Parquet staging files, instead of CSV ones. + * This implementation is similar to {@link LegacyS3StreamCopier}. The difference is that this + * implementation creates Parquet staging files, instead of CSV ones. *
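 * (Parquet is a columnar, compressed format, so the staging files are typically smaller than their CSV equivalents.)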
 *
* It does the following operations: diff --git a/airbyte-integrations/connectors/destination-dynamodb/Dockerfile b/airbyte-integrations/connectors/destination-dynamodb/Dockerfile index 319c38ea3133..ed520b1c3148 100644 --- a/airbyte-integrations/connectors/destination-dynamodb/Dockerfile +++ b/airbyte-integrations/connectors/destination-dynamodb/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-dynamodb -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.0 LABEL io.airbyte.name=airbyte/destination-dynamodb diff --git a/airbyte-integrations/connectors/destination-e2e-test/Dockerfile b/airbyte-integrations/connectors/destination-e2e-test/Dockerfile index 76348c3209ca..b0b99f1f74ca 100644 --- a/airbyte-integrations/connectors/destination-e2e-test/Dockerfile +++ b/airbyte-integrations/connectors/destination-e2e-test/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-e2e-test -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar +ADD build/distributions/${APPLICATION}*.tar /airbyte -RUN tar xf ${APPLICATION}.tar --strip-components=1 - -LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.version=0.1.1 LABEL io.airbyte.name=airbyte/destination-e2e-test diff --git a/airbyte-integrations/connectors/destination-elasticsearch/Dockerfile b/airbyte-integrations/connectors/destination-elasticsearch/Dockerfile index ae3509fcadb7..d9c56e3ce9a1 100644 --- a/airbyte-integrations/connectors/destination-elasticsearch/Dockerfile +++ b/airbyte-integrations/connectors/destination-elasticsearch/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-elasticsearch -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.0 LABEL io.airbyte.name=airbyte/destination-elasticsearch diff --git a/airbyte-integrations/connectors/destination-gcs/Dockerfile b/airbyte-integrations/connectors/destination-gcs/Dockerfile index 23cfcd8c0633..14995b54941c 100644 --- a/airbyte-integrations/connectors/destination-gcs/Dockerfile +++ b/airbyte-integrations/connectors/destination-gcs/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-gcs -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.15 LABEL io.airbyte.name=airbyte/destination-gcs diff --git a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroWriter.java b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroWriter.java index 49c87bd72e5d..a75f29c60f2c 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroWriter.java +++ b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroWriter.java @@ -36,6 +36,7 @@ public class GcsAvroWriter extends BaseGcsWriter implements S3Writer { private final StreamTransferManager uploadManager; private final MultiPartOutputStream outputStream; private final 
DataFileWriter dataFileWriter; + private final String objectKey; public GcsAvroWriter(final GcsDestinationConfig config, final AmazonS3 s3Client, @@ -47,7 +48,7 @@ public GcsAvroWriter(final GcsDestinationConfig config, super(config, s3Client, configuredStream); final String outputFilename = BaseGcsWriter.getOutputFilename(uploadTimestamp, S3Format.AVRO); - final String objectKey = String.join("/", outputPrefix, outputFilename); + objectKey = String.join("/", outputPrefix, outputFilename); LOGGER.info("Full GCS path for stream '{}': {}/{}", stream.getName(), config.getBucketName(), objectKey); @@ -85,4 +86,9 @@ protected void closeWhenFail() throws IOException { uploadManager.abort(); } + @Override + public String getOutputPath() { + return objectKey; + } + } diff --git a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/csv/GcsCsvWriter.java b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/csv/GcsCsvWriter.java index 4551a0b07dc1..d1fbc72fe611 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/csv/GcsCsvWriter.java +++ b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/csv/GcsCsvWriter.java @@ -36,6 +36,7 @@ public class GcsCsvWriter extends BaseGcsWriter implements S3Writer { private final MultiPartOutputStream outputStream; private final CSVPrinter csvPrinter; private final String gcsCsvFileLocation; // this used in destination-bigquery (GCS upload type) + private final String objectKey; public GcsCsvWriter(final GcsDestinationConfig config, final AmazonS3 s3Client, @@ -48,7 +49,7 @@ public GcsCsvWriter(final GcsDestinationConfig config, this.csvSheetGenerator = CsvSheetGenerator.Factory.create(configuredStream.getStream().getJsonSchema(), formatConfig); final String outputFilename = BaseGcsWriter.getOutputFilename(uploadTimestamp, S3Format.CSV); - final String objectKey = String.join("/", outputPrefix, outputFilename); + objectKey = String.join("/", outputPrefix, outputFilename); gcsCsvFileLocation = String.format("gs://%s/%s", config.getBucketName(), objectKey); LOGGER.info("Full GCS path for stream '{}': {}/{}", stream.getName(), config.getBucketName(), @@ -90,4 +91,9 @@ public CSVPrinter getCsvPrinter() { return csvPrinter; } + @Override + public String getOutputPath() { + return objectKey; + } + } diff --git a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/jsonl/GcsJsonlWriter.java b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/jsonl/GcsJsonlWriter.java index dc8eef4184ec..f98cd27c60ba 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/jsonl/GcsJsonlWriter.java +++ b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/jsonl/GcsJsonlWriter.java @@ -35,6 +35,7 @@ public class GcsJsonlWriter extends BaseGcsWriter implements S3Writer { private final StreamTransferManager uploadManager; private final MultiPartOutputStream outputStream; private final PrintWriter printWriter; + private final String objectKey; public GcsJsonlWriter(final GcsDestinationConfig config, final AmazonS3 s3Client, @@ -43,7 +44,7 @@ public GcsJsonlWriter(final GcsDestinationConfig config, super(config, s3Client, configuredStream); final String outputFilename = 
BaseGcsWriter.getOutputFilename(uploadTimestamp, S3Format.JSONL); - final String objectKey = String.join("/", outputPrefix, outputFilename); + objectKey = String.join("/", outputPrefix, outputFilename); LOGGER.info("Full GCS path for stream '{}': {}/{}", stream.getName(), config.getBucketName(), objectKey); @@ -78,4 +79,9 @@ protected void closeWhenFail() { uploadManager.abort(); } + @Override + public String getOutputPath() { + return objectKey; + } + } diff --git a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/parquet/GcsParquetWriter.java b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/parquet/GcsParquetWriter.java index e72e3613108d..ae4373c12abf 100644 --- a/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/parquet/GcsParquetWriter.java +++ b/airbyte-integrations/connectors/destination-gcs/src/main/java/io/airbyte/integrations/destination/gcs/parquet/GcsParquetWriter.java @@ -6,7 +6,6 @@ import com.amazonaws.services.s3.AmazonS3; import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectWriter; import io.airbyte.integrations.destination.gcs.GcsDestinationConfig; import io.airbyte.integrations.destination.gcs.credential.GcsHmacKeyCredentialConfig; import io.airbyte.integrations.destination.gcs.writer.BaseGcsWriter; @@ -37,10 +36,10 @@ public class GcsParquetWriter extends BaseGcsWriter implements S3Writer { private static final Logger LOGGER = LoggerFactory.getLogger(GcsParquetWriter.class); private static final ObjectMapper MAPPER = new ObjectMapper(); - private static final ObjectWriter WRITER = MAPPER.writer(); private final ParquetWriter parquetWriter; private final AvroRecordFactory avroRecordFactory; + private final String objectKey; public GcsParquetWriter(final GcsDestinationConfig config, final AmazonS3 s3Client, @@ -52,7 +51,7 @@ public GcsParquetWriter(final GcsDestinationConfig config, super(config, s3Client, configuredStream); final String outputFilename = BaseGcsWriter.getOutputFilename(uploadTimestamp, S3Format.PARQUET); - final String objectKey = String.join("/", outputPrefix, outputFilename); + objectKey = String.join("/", outputPrefix, outputFilename); LOGGER.info("Storage path for stream '{}': {}/{}", stream.getName(), config.getBucketName(), objectKey); final URI uri = new URI(String.format("s3a://%s/%s/%s", config.getBucketName(), outputPrefix, outputFilename)); @@ -109,4 +108,9 @@ public void close(final boolean hasFailed) throws IOException { } } + @Override + public String getOutputPath() { + return objectKey; + } + } diff --git a/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroWriterTest.java b/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroWriterTest.java new file mode 100644 index 000000000000..75616d186e79 --- /dev/null +++ b/airbyte-integrations/connectors/destination-gcs/src/test/java/io/airbyte/integrations/destination/gcs/avro/GcsAvroWriterTest.java @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.gcs.avro; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; +import static org.mockito.Mockito.mock; + +import com.amazonaws.services.s3.AmazonS3; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.airbyte.integrations.destination.gcs.GcsDestinationConfig; +import io.airbyte.integrations.destination.s3.avro.S3AvroFormatConfig; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import java.io.IOException; +import java.sql.Timestamp; +import java.time.Instant; +import org.apache.avro.Schema; +import org.junit.jupiter.api.Test; + +class GcsAvroWriterTest { + + @Test + public void generatesCorrectObjectPath() throws IOException { + final GcsAvroWriter writer = new GcsAvroWriter( + new GcsDestinationConfig( + "fake-bucket", + "fake-bucketPath", + "fake-bucketRegion", + null, + new S3AvroFormatConfig(new ObjectMapper().createObjectNode())), + mock(AmazonS3.class, RETURNS_DEEP_STUBS), + new ConfiguredAirbyteStream() + .withStream(new AirbyteStream() + .withNamespace("fake-namespace") + .withName("fake-stream")), + Timestamp.from(Instant.ofEpochMilli(1234)), + mock(Schema.class), + null); + + assertEquals("fake-bucketPath/fake_namespace/fake_stream/1970_01_01_1234_0.avro", writer.getOutputPath()); + } + +} diff --git a/airbyte-integrations/connectors/destination-jdbc/Dockerfile b/airbyte-integrations/connectors/destination-jdbc/Dockerfile index 3607f5055c43..8188e37727de 100644 --- a/airbyte-integrations/connectors/destination-jdbc/Dockerfile +++ b/airbyte-integrations/connectors/destination-jdbc/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-jdbc -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.3.7 LABEL io.airbyte.name=airbyte/destination-jdbc diff --git a/airbyte-integrations/connectors/destination-jdbc/build.gradle b/airbyte-integrations/connectors/destination-jdbc/build.gradle index fb46472e57f0..a8161a22b016 100644 --- a/airbyte-integrations/connectors/destination-jdbc/build.gradle +++ b/airbyte-integrations/connectors/destination-jdbc/build.gradle @@ -19,6 +19,7 @@ dependencies { implementation 'com.fasterxml.jackson.core:jackson-databind' testImplementation "org.testcontainers:postgresql:1.15.3" + testImplementation "org.mockito:mockito-inline:4.1.0" integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation "org.testcontainers:postgresql:1.15.3" diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java index 0b4c67727600..ff36d0bbd671 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java @@ -29,7 +29,7 @@ public class CopyConsumerFactory { private static final Logger LOGGER = LoggerFactory.getLogger(CopyConsumerFactory.class); - private static final int MAX_BATCH_SIZE_BYTES = 1024 * 1024 * 1024 / 4; // 
256 mib
+  private static final int MAX_BATCH_SIZE_BYTES = 1024 * 1024 * 1024 / 4; // 256 MiB
 
   public static AirbyteMessageConsumer create(final Consumer<AirbyteMessage> outputRecordCollector,
                                               final JdbcDatabase database,
diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java
index 8f91cbce3e4e..125993a2d221 100644
--- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java
+++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java
@@ -61,7 +61,8 @@ public interface StreamCopier {
   /**
    * Creates the staging file and all the necessary items to write data to this file.
    *
-   * @return the name of the staging file
+   * @return A string that uniquely identifies the file. E.g. the filename, or a unique suffix that is
+   *         appended to a shared filename prefix
    */
   String prepareStagingFile();
 
diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/LegacyS3StreamCopier.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/LegacyS3StreamCopier.java
new file mode 100644
index 000000000000..c7fafa65a18e
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/LegacyS3StreamCopier.java
@@ -0,0 +1,235 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.integrations.destination.jdbc.copy.s3;
+
+import alex.mojaki.s3upload.MultiPartOutputStream;
+import alex.mojaki.s3upload.StreamTransferManager;
+import com.amazonaws.services.s3.AmazonS3;
+import io.airbyte.commons.json.Jsons;
+import io.airbyte.commons.lang.Exceptions;
+import io.airbyte.db.jdbc.JdbcDatabase;
+import io.airbyte.integrations.destination.ExtendedNameTransformer;
+import io.airbyte.integrations.destination.jdbc.SqlOperations;
+import io.airbyte.integrations.destination.jdbc.StagingFilenameGenerator;
+import io.airbyte.integrations.destination.jdbc.copy.StreamCopier;
+import io.airbyte.integrations.destination.s3.S3DestinationConfig;
+import io.airbyte.protocol.models.AirbyteRecordMessage;
+import io.airbyte.protocol.models.DestinationSyncMode;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.time.Instant;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVPrinter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @deprecated See {@link S3StreamCopier}
+ */
+@Deprecated
+public abstract class LegacyS3StreamCopier implements StreamCopier {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(LegacyS3StreamCopier.class);
+
+  private static final int DEFAULT_UPLOAD_THREADS = 10; // The S3 cli uses 10 threads by default.
+  private static final int DEFAULT_QUEUE_CAPACITY = DEFAULT_UPLOAD_THREADS;
+  // It is optimal to write every 10,000,000 records (BATCH_SIZE * DEFAULT_PART) to a new file.
+  // The BATCH_SIZE is defined in CopyConsumerFactory.
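+  // (MAX_PARTS_PER_FILE below is 1,000, so the 10,000,000 figure implies batches of roughly 10,000 records each.)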
+  // The average size of such a file will be about 1 GB.
+  // This makes the files easier to work with and speeds up loading large amounts of data.
+  // In addition, for a large number of records, the copy request will not be dropped due to
+  // QUERY_TIMEOUT when
+  // the records from the file are copied to the staging table.
+  public static final int MAX_PARTS_PER_FILE = 1000;
+
+  protected final AmazonS3 s3Client;
+  protected final S3DestinationConfig s3Config;
+  protected final String tmpTableName;
+  private final DestinationSyncMode destSyncMode;
+  protected final String schemaName;
+  protected final String streamName;
+  protected final JdbcDatabase db;
+  private final ExtendedNameTransformer nameTransformer;
+  private final SqlOperations sqlOperations;
+  protected final Set<String> s3StagingFiles = new HashSet<>();
+  private final Map<String, StreamTransferManager> multipartUploadManagers = new HashMap<>();
+  private final Map<String, MultiPartOutputStream> outputStreams = new HashMap<>();
+  private final Map<String, CSVPrinter> csvPrinters = new HashMap<>();
+  protected final String stagingFolder;
+  private final StagingFilenameGenerator filenameGenerator;
+
+  public LegacyS3StreamCopier(final String stagingFolder,
+                              final DestinationSyncMode destSyncMode,
+                              final String schema,
+                              final String streamName,
+                              final AmazonS3 client,
+                              final JdbcDatabase db,
+                              final S3DestinationConfig s3Config,
+                              final ExtendedNameTransformer nameTransformer,
+                              final SqlOperations sqlOperations) {
+    this.destSyncMode = destSyncMode;
+    this.schemaName = schema;
+    this.streamName = streamName;
+    this.stagingFolder = stagingFolder;
+    this.db = db;
+    this.nameTransformer = nameTransformer;
+    this.sqlOperations = sqlOperations;
+    this.tmpTableName = nameTransformer.getTmpTableName(streamName);
+    this.s3Client = client;
+    this.s3Config = s3Config;
+    this.filenameGenerator = new StagingFilenameGenerator(streamName, MAX_PARTS_PER_FILE);
+  }
+
+  private String prepareS3StagingFile() {
+    return String.join("/", stagingFolder, schemaName, filenameGenerator.getStagingFilename());
+  }
+
+  @Override
+  public String prepareStagingFile() {
+    final var name = prepareS3StagingFile();
+    if (!s3StagingFiles.contains(name)) {
+      s3StagingFiles.add(name);
+      LOGGER.info("S3 upload part size: {} MB", s3Config.getPartSize());
+      // The stream transfer manager lets us greedily stream into S3. The native AWS SDK does not
+      // have support for streaming multipart uploads;
+      // The alternative is first writing the entire output to disk before loading into S3. This is not
+      // feasible with large tables.
+      // Data is chunked into parts. A part is sent off to a queue to be uploaded once it has reached its
+      // configured part size.
+      // Memory consumption is (numUploadThreads + queue capacity) * part size = (10 + 10) * 10 = 200 MB
+      // at current configurations.
+      final var manager = new StreamTransferManager(s3Config.getBucketName(), name, s3Client)
+          .numUploadThreads(DEFAULT_UPLOAD_THREADS)
+          .queueCapacity(DEFAULT_QUEUE_CAPACITY)
+          .partSize(s3Config.getPartSize());
+      multipartUploadManagers.put(name, manager);
+      final var outputStream = manager.getMultiPartOutputStreams().get(0);
+      // We only need one output stream as we only have one input stream. This is reasonably performant.
+      // See the above comment.
+      outputStreams.put(name, outputStream);
+      final var writer = new PrintWriter(outputStream, true, StandardCharsets.UTF_8);
+      try {
+        csvPrinters.put(name, new CSVPrinter(writer, CSVFormat.DEFAULT));
+      } catch (final IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
+    return name;
+  }
+
+  @Override
+  public void write(final UUID id, final AirbyteRecordMessage recordMessage, final String s3FileName) throws Exception {
+    if (csvPrinters.containsKey(s3FileName)) {
+      csvPrinters.get(s3FileName).printRecord(id,
+          Jsons.serialize(recordMessage.getData()),
+          Timestamp.from(Instant.ofEpochMilli(recordMessage.getEmittedAt())));
+    }
+  }
+
+  @Override
+  public void closeStagingUploader(final boolean hasFailed) throws Exception {
+    if (hasFailed) {
+      for (final var multipartUploadManager : multipartUploadManagers.values()) {
+        multipartUploadManager.abort();
+      }
+    }
+    closeAndWaitForUpload();
+  }
+
+  @Override
+  public void createDestinationSchema() throws Exception {
+    LOGGER.info("Creating schema in destination if it doesn't exist: {}", schemaName);
+    sqlOperations.createSchemaIfNotExists(db, schemaName);
+  }
+
+  @Override
+  public void createTemporaryTable() throws Exception {
+    LOGGER.info("Preparing tmp table in destination for stream: {}, schema: {}, tmp table name: {}.", streamName, schemaName, tmpTableName);
+    sqlOperations.createTableIfNotExists(db, schemaName, tmpTableName);
+  }
+
+  @Override
+  public void copyStagingFileToTemporaryTable() throws Exception {
+    LOGGER.info("Starting copy to tmp table: {} in destination for stream: {}, schema: {}.", tmpTableName, streamName, schemaName);
+    s3StagingFiles.forEach(s3StagingFile -> Exceptions.toRuntime(() -> {
+      copyS3CsvFileIntoTable(db, getFullS3Path(s3Config.getBucketName(), s3StagingFile), schemaName, tmpTableName, s3Config);
+    }));
+    LOGGER.info("Copy to tmp table {} in destination for stream {} complete.", tmpTableName, streamName);
+  }
+
+  @Override
+  public String createDestinationTable() throws Exception {
+    final var destTableName = nameTransformer.getRawTableName(streamName);
+    LOGGER.info("Preparing table {} in destination.", destTableName);
+    sqlOperations.createTableIfNotExists(db, schemaName, destTableName);
+    LOGGER.info("Table {} in destination prepared.", destTableName);
+
+    return destTableName;
+  }
+
+  @Override
+  public String generateMergeStatement(final String destTableName) {
+    LOGGER.info("Preparing to merge tmp table {} to dest table: {}, schema: {}, in destination.", tmpTableName, destTableName, schemaName);
+    final var queries = new StringBuilder();
+    if (destSyncMode.equals(DestinationSyncMode.OVERWRITE)) {
+      queries.append(sqlOperations.truncateTableQuery(db, schemaName, destTableName));
+      LOGGER.info("Destination OVERWRITE mode detected. 
Dest table: {}, schema: {}, truncated.", destTableName, schemaName); + } + queries.append(sqlOperations.copyTableQuery(db, schemaName, tmpTableName, destTableName)); + return queries.toString(); + } + + @Override + public void removeFileAndDropTmpTable() throws Exception { + s3StagingFiles.forEach(s3StagingFile -> { + LOGGER.info("Begin cleaning s3 staging file {}.", s3StagingFile); + if (s3Client.doesObjectExist(s3Config.getBucketName(), s3StagingFile)) { + s3Client.deleteObject(s3Config.getBucketName(), s3StagingFile); + } + LOGGER.info("S3 staging file {} cleaned.", s3StagingFile); + }); + + LOGGER.info("Begin cleaning {} tmp table in destination.", tmpTableName); + sqlOperations.dropTableIfExists(db, schemaName, tmpTableName); + LOGGER.info("{} tmp table in destination cleaned.", tmpTableName); + } + + protected static String getFullS3Path(final String s3BucketName, final String s3StagingFile) { + return String.join("/", "s3:/", s3BucketName, s3StagingFile); + } + + /** + * Closes the printers/outputstreams and waits for any buffered uploads to complete. + */ + private void closeAndWaitForUpload() throws IOException { + LOGGER.info("Uploading remaining data for {} stream.", streamName); + for (final var csvPrinter : csvPrinters.values()) { + csvPrinter.close(); + } + for (final var outputStream : outputStreams.values()) { + outputStream.close(); + } + for (final var multipartUploadManager : multipartUploadManagers.values()) { + multipartUploadManager.complete(); + } + LOGGER.info("All data for {} stream uploaded.", streamName); + } + + public abstract void copyS3CsvFileIntoTable(JdbcDatabase database, + String s3FileLocation, + String schema, + String tableName, + S3DestinationConfig s3Config) + throws SQLException; + +} diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/LegacyS3StreamCopierFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/LegacyS3StreamCopierFactory.java new file mode 100644 index 000000000000..f6b4b957d086 --- /dev/null +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/LegacyS3StreamCopierFactory.java @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.jdbc.copy.s3; + +import com.amazonaws.services.s3.AmazonS3; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.integrations.destination.ExtendedNameTransformer; +import io.airbyte.integrations.destination.jdbc.SqlOperations; +import io.airbyte.integrations.destination.jdbc.copy.StreamCopier; +import io.airbyte.integrations.destination.jdbc.copy.StreamCopierFactory; +import io.airbyte.integrations.destination.s3.S3DestinationConfig; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.DestinationSyncMode; + +/** + * See {@link S3StreamCopierFactory} instead. + */ +@Deprecated +public abstract class LegacyS3StreamCopierFactory implements StreamCopierFactory { + + /** + * Used by the copy consumer. 
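+   * Resolves the destination schema and the S3 client from the supplied S3DestinationConfig, then
+   * delegates to the abstract create() overload below.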
+ */ + @Override + public StreamCopier create(final String configuredSchema, + final S3DestinationConfig s3Config, + final String stagingFolder, + final ConfiguredAirbyteStream configuredStream, + final ExtendedNameTransformer nameTransformer, + final JdbcDatabase db, + final SqlOperations sqlOperations) { + try { + final AirbyteStream stream = configuredStream.getStream(); + final DestinationSyncMode syncMode = configuredStream.getDestinationSyncMode(); + final String schema = StreamCopierFactory.getSchema(stream.getNamespace(), configuredSchema, nameTransformer); + final AmazonS3 s3Client = s3Config.getS3Client(); + + return create(stagingFolder, syncMode, schema, stream.getName(), s3Client, db, s3Config, nameTransformer, sqlOperations); + } catch (final Exception e) { + throw new RuntimeException(e); + } + } + + /** + * For specific copier suppliers to implement. + */ + public abstract StreamCopier create(String stagingFolder, + DestinationSyncMode syncMode, + String schema, + String streamName, + AmazonS3 s3Client, + JdbcDatabase db, + S3DestinationConfig s3Config, + ExtendedNameTransformer nameTransformer, + SqlOperations sqlOperations) + throws Exception; + +} diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3CopyConfig.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3CopyConfig.java new file mode 100644 index 000000000000..bfce8529dfce --- /dev/null +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3CopyConfig.java @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.jdbc.copy.s3; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.integrations.destination.s3.S3DestinationConfig; + +/** + * S3 copy destinations need an S3DestinationConfig to configure the basic upload behavior. We also + * want additional flags to configure behavior that only applies to the copy-to-S3 + + * load-into-warehouse portion. Currently this is just purgeStagingData, but this may expand. 
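+ *
+ * A sketch of typical construction from the connector's JSON config:
+ * {@code new S3CopyConfig(S3CopyConfig.shouldPurgeStagingData(config), s3Config)}, where s3Config is
+ * an already-parsed S3DestinationConfig.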
+ */ +public record S3CopyConfig(boolean purgeStagingData, S3DestinationConfig s3Config) { + + public static boolean shouldPurgeStagingData(final JsonNode config) { + if (config.get("purge_staging_data") == null) { + return true; + } else { + return config.get("purge_staging_data").asBoolean(); + } + } + +} diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopier.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopier.java index ee48d8dd7151..a8492074571a 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopier.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopier.java @@ -4,32 +4,26 @@ package io.airbyte.integrations.destination.jdbc.copy.s3; -import alex.mojaki.s3upload.MultiPartOutputStream; -import alex.mojaki.s3upload.StreamTransferManager; import com.amazonaws.services.s3.AmazonS3; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.Exceptions; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.destination.jdbc.SqlOperations; -import io.airbyte.integrations.destination.jdbc.StagingFilenameGenerator; import io.airbyte.integrations.destination.jdbc.copy.StreamCopier; import io.airbyte.integrations.destination.s3.S3DestinationConfig; +import io.airbyte.integrations.destination.s3.csv.S3CsvFormatConfig; +import io.airbyte.integrations.destination.s3.csv.S3CsvWriter; +import io.airbyte.integrations.destination.s3.csv.StagingDatabaseCsvSheetGenerator; +import io.airbyte.integrations.destination.s3.writer.S3Writer; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; import io.airbyte.protocol.models.DestinationSyncMode; import java.io.IOException; -import java.io.PrintWriter; -import java.nio.charset.StandardCharsets; import java.sql.SQLException; import java.sql.Timestamp; -import java.time.Instant; import java.util.HashMap; -import java.util.HashSet; import java.util.Map; -import java.util.Set; import java.util.UUID; import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVPrinter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -39,14 +33,6 @@ public abstract class S3StreamCopier implements StreamCopier { private static final int DEFAULT_UPLOAD_THREADS = 10; // The S3 cli uses 10 threads by default. private static final int DEFAULT_QUEUE_CAPACITY = DEFAULT_UPLOAD_THREADS; - // It is optimal to write every 10,000,000 records (BATCH_SIZE * DEFAULT_PART) to a new file. - // The BATCH_SIZE is defined in CopyConsumerFactory. - // The average size of such a file will be about 1 GB. - // This will make it easier to work with files and speed up the recording of large amounts of data. - // In addition, for a large number of records, we will not get a drop in the copy request to - // QUERY_TIMEOUT when - // the records from the file are copied to the staging table. 
- public static final int MAX_PARTS_PER_FILE = 1000; protected final AmazonS3 s3Client; protected final S3DestinationConfig s3Config; @@ -57,91 +43,93 @@ public abstract class S3StreamCopier implements StreamCopier { protected final JdbcDatabase db; private final ExtendedNameTransformer nameTransformer; private final SqlOperations sqlOperations; - protected final Set s3StagingFiles = new HashSet<>(); - private final Map multipartUploadManagers = new HashMap<>(); - private final Map outputStreams = new HashMap<>(); - private final Map csvPrinters = new HashMap<>(); - private final String s3FileName; + private final ConfiguredAirbyteStream configuredAirbyteStream; + private final Timestamp uploadTime; protected final String stagingFolder; - private final StagingFilenameGenerator filenameGenerator; + protected final Map stagingWritersByFile = new HashMap<>(); + private final boolean purgeStagingData; + // The number of batches of records that will be inserted into each file. + private final int maxPartsPerFile; + // The number of batches inserted into the current file. + private int partsAddedToCurrentFile; + private String currentFile; + + /** + * @param maxPartsPerFile The number of "chunks" of requests to add into each file. Each chunk can + * be up to 256 MiB (see CopyConsumerFactory#MAX_BATCH_SIZE_BYTES). For example, Redshift + * recommends at most 1 GiB per file, so you would want maxPartsPerFile = 4 (because 4 * + * 256MiB = 1 GiB). + */ public S3StreamCopier(final String stagingFolder, - final DestinationSyncMode destSyncMode, final String schema, - final String streamName, - final String s3FileName, final AmazonS3 client, final JdbcDatabase db, - final S3DestinationConfig s3Config, + final S3CopyConfig config, final ExtendedNameTransformer nameTransformer, - final SqlOperations sqlOperations) { - this.destSyncMode = destSyncMode; + final SqlOperations sqlOperations, + final ConfiguredAirbyteStream configuredAirbyteStream, + final Timestamp uploadTime, + final int maxPartsPerFile) { + this.destSyncMode = configuredAirbyteStream.getDestinationSyncMode(); this.schemaName = schema; - this.streamName = streamName; - this.s3FileName = s3FileName; + this.streamName = configuredAirbyteStream.getStream().getName(); this.stagingFolder = stagingFolder; this.db = db; this.nameTransformer = nameTransformer; this.sqlOperations = sqlOperations; - this.tmpTableName = nameTransformer.getTmpTableName(streamName); + this.configuredAirbyteStream = configuredAirbyteStream; + this.uploadTime = uploadTime; + this.tmpTableName = nameTransformer.getTmpTableName(this.streamName); this.s3Client = client; - this.s3Config = s3Config; - this.filenameGenerator = new StagingFilenameGenerator(streamName, MAX_PARTS_PER_FILE); - } + this.s3Config = config.s3Config(); + this.purgeStagingData = config.purgeStagingData(); - private String prepareS3StagingFile() { - return String.join("/", stagingFolder, schemaName, filenameGenerator.getStagingFilename()); + this.maxPartsPerFile = maxPartsPerFile; + this.partsAddedToCurrentFile = 0; } @Override public String prepareStagingFile() { - final var name = prepareS3StagingFile(); - if (!s3StagingFiles.contains(name)) { - s3StagingFiles.add(name); + if (partsAddedToCurrentFile == 0) { LOGGER.info("S3 upload part size: {} MB", s3Config.getPartSize()); - // The stream transfer manager lets us greedily stream into S3. The native AWS SDK does not - // have support for streaming multipart uploads; - // The alternative is first writing the entire output to disk before loading into S3. 
This is not - // feasible with large tables. - // Data is chunked into parts. A part is sent off to a queue to be uploaded once it has reached it's - // configured part size. - // Memory consumption is queue capacity * part size = 10 * 10 = 100 MB at current configurations. - final var manager = new StreamTransferManager(s3Config.getBucketName(), name, s3Client) - .numUploadThreads(DEFAULT_UPLOAD_THREADS) - .queueCapacity(DEFAULT_QUEUE_CAPACITY) - .partSize(s3Config.getPartSize()); - multipartUploadManagers.put(name, manager); - final var outputStream = manager.getMultiPartOutputStreams().get(0); - // We only need one output stream as we only have one input stream. This is reasonably performant. - // See the above comment. - outputStreams.put(name, outputStream); - final var writer = new PrintWriter(outputStream, true, StandardCharsets.UTF_8); + try { - csvPrinters.put(name, new CSVPrinter(writer, CSVFormat.DEFAULT)); + final S3CsvWriter writer = new S3CsvWriter.Builder( + // The Flattening value is actually ignored, because we pass an explicit CsvSheetGenerator. So just + // pass in null. + s3Config.cloneWithFormatConfig(new S3CsvFormatConfig(null, (long) s3Config.getPartSize())), + s3Client, + configuredAirbyteStream, + uploadTime) + .uploadThreads(DEFAULT_UPLOAD_THREADS) + .queueCapacity(DEFAULT_QUEUE_CAPACITY) + .csvSettings(CSVFormat.DEFAULT) + .withHeader(false) + .csvSheetGenerator(new StagingDatabaseCsvSheetGenerator()) + .build(); + currentFile = writer.getOutputPath(); + stagingWritersByFile.put(currentFile, writer); } catch (final IOException e) { throw new RuntimeException(e); } } - return name; + partsAddedToCurrentFile = (partsAddedToCurrentFile + 1) % maxPartsPerFile; + return currentFile; } @Override - public void write(final UUID id, final AirbyteRecordMessage recordMessage, final String s3FileName) throws Exception { - if (csvPrinters.containsKey(s3FileName)) { - csvPrinters.get(s3FileName).printRecord(id, - Jsons.serialize(recordMessage.getData()), - Timestamp.from(Instant.ofEpochMilli(recordMessage.getEmittedAt()))); + public void write(final UUID id, final AirbyteRecordMessage recordMessage, final String filename) throws Exception { + if (stagingWritersByFile.containsKey(filename)) { + stagingWritersByFile.get(filename).write(id, recordMessage); } } @Override public void closeStagingUploader(final boolean hasFailed) throws Exception { - if (hasFailed) { - for (final var multipartUploadManager : multipartUploadManagers.values()) { - multipartUploadManager.abort(); - } + for (final S3Writer writer : stagingWritersByFile.values()) { + writer.close(hasFailed); } - closeAndWaitForUpload(); } @Override @@ -159,9 +147,10 @@ public void createTemporaryTable() throws Exception { @Override public void copyStagingFileToTemporaryTable() throws Exception { LOGGER.info("Starting copy to tmp table: {} in destination for stream: {}, schema: {}, .", tmpTableName, streamName, schemaName); - s3StagingFiles.forEach(s3StagingFile -> Exceptions.toRuntime(() -> { - copyS3CsvFileIntoTable(db, getFullS3Path(s3Config.getBucketName(), s3StagingFile), schemaName, tmpTableName, s3Config); - })); + for (final Map.Entry entry : stagingWritersByFile.entrySet()) { + final String objectKey = entry.getValue().getOutputPath(); + copyS3CsvFileIntoTable(db, getFullS3Path(s3Config.getBucketName(), objectKey), schemaName, tmpTableName, s3Config); + } LOGGER.info("Copy to tmp table {} in destination for stream {} complete.", tmpTableName, streamName); } @@ -189,13 +178,18 @@ public String 
generateMergeStatement(final String destTableName) { @Override public void removeFileAndDropTmpTable() throws Exception { - s3StagingFiles.forEach(s3StagingFile -> { - LOGGER.info("Begin cleaning s3 staging file {}.", s3StagingFile); - if (s3Client.doesObjectExist(s3Config.getBucketName(), s3StagingFile)) { - s3Client.deleteObject(s3Config.getBucketName(), s3StagingFile); + if (purgeStagingData) { + for (final Map.Entry entry : stagingWritersByFile.entrySet()) { + final String suffix = entry.getKey(); + final String objectKey = entry.getValue().getOutputPath(); + + LOGGER.info("Begin cleaning s3 staging file {}.", objectKey); + if (s3Client.doesObjectExist(s3Config.getBucketName(), objectKey)) { + s3Client.deleteObject(s3Config.getBucketName(), objectKey); + } + LOGGER.info("S3 staging file {} cleaned.", suffix); } - LOGGER.info("S3 staging file {} cleaned.", s3StagingFile); - }); + } LOGGER.info("Begin cleaning {} tmp table in destination.", tmpTableName); sqlOperations.dropTableIfExists(db, schemaName, tmpTableName); @@ -206,23 +200,6 @@ protected static String getFullS3Path(final String s3BucketName, final String s3 return String.join("/", "s3:/", s3BucketName, s3StagingFile); } - /** - * Closes the printers/outputstreams and waits for any buffered uploads to complete. - */ - private void closeAndWaitForUpload() throws IOException { - LOGGER.info("Uploading remaining data for {} stream.", streamName); - for (final var csvPrinter : csvPrinters.values()) { - csvPrinter.close(); - } - for (final var outputStream : outputStreams.values()) { - outputStream.close(); - } - for (final var multipartUploadManager : multipartUploadManagers.values()) { - multipartUploadManager.complete(); - } - LOGGER.info("All data for {} stream uploaded.", streamName); - } - public abstract void copyS3CsvFileIntoTable(JdbcDatabase database, String s3FileLocation, String schema, diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopierFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopierFactory.java index 5d206562ae02..6aad77fcc951 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopierFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopierFactory.java @@ -10,19 +10,17 @@ import io.airbyte.integrations.destination.jdbc.SqlOperations; import io.airbyte.integrations.destination.jdbc.copy.StreamCopier; import io.airbyte.integrations.destination.jdbc.copy.StreamCopierFactory; -import io.airbyte.integrations.destination.s3.S3DestinationConfig; import io.airbyte.protocol.models.AirbyteStream; import io.airbyte.protocol.models.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.DestinationSyncMode; -public abstract class S3StreamCopierFactory implements StreamCopierFactory { +public abstract class S3StreamCopierFactory implements StreamCopierFactory { /** * Used by the copy consumer. 
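+   * Note that the S3 client and the purge-staging-data behavior are now sourced from the
+   * {@link S3CopyConfig} argument rather than a bare S3DestinationConfig.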
*/ @Override public StreamCopier create(final String configuredSchema, - final S3DestinationConfig s3Config, + final S3CopyConfig config, final String stagingFolder, final ConfiguredAirbyteStream configuredStream, final ExtendedNameTransformer nameTransformer, @@ -30,11 +28,10 @@ public StreamCopier create(final String configuredSchema, final SqlOperations sqlOperations) { try { final AirbyteStream stream = configuredStream.getStream(); - final DestinationSyncMode syncMode = configuredStream.getDestinationSyncMode(); final String schema = StreamCopierFactory.getSchema(stream.getNamespace(), configuredSchema, nameTransformer); - final AmazonS3 s3Client = s3Config.getS3Client(); + final AmazonS3 s3Client = config.s3Config().getS3Client(); - return create(stagingFolder, syncMode, schema, stream.getName(), s3Client, db, s3Config, nameTransformer, sqlOperations); + return create(stagingFolder, schema, s3Client, db, config, nameTransformer, sqlOperations, configuredStream); } catch (final Exception e) { throw new RuntimeException(e); } @@ -43,15 +40,14 @@ public StreamCopier create(final String configuredSchema, /** * For specific copier suppliers to implement. */ - public abstract StreamCopier create(String stagingFolder, - DestinationSyncMode syncMode, - String schema, - String streamName, - AmazonS3 s3Client, - JdbcDatabase db, - S3DestinationConfig s3Config, - ExtendedNameTransformer nameTransformer, - SqlOperations sqlOperations) + protected abstract StreamCopier create(String stagingFolder, + String schema, + AmazonS3 s3Client, + JdbcDatabase db, + S3CopyConfig config, + ExtendedNameTransformer nameTransformer, + SqlOperations sqlOperations, + ConfiguredAirbyteStream configuredStream) throws Exception; } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/copy/s3/LegacyS3StreamCopierTest.java b/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/copy/s3/LegacyS3StreamCopierTest.java new file mode 100644 index 000000000000..7326941de3e5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/copy/s3/LegacyS3StreamCopierTest.java @@ -0,0 +1,280 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.jdbc.copy.s3; + +import static java.util.Collections.singletonList; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.mockConstruction; +import static org.mockito.Mockito.verify; + +import alex.mojaki.s3upload.MultiPartOutputStream; +import alex.mojaki.s3upload.StreamTransferManager; +import com.amazonaws.services.s3.AmazonS3Client; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.integrations.destination.ExtendedNameTransformer; +import io.airbyte.integrations.destination.jdbc.SqlOperations; +import io.airbyte.integrations.destination.s3.S3DestinationConfig; +import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.DestinationSyncMode; +import java.io.ByteArrayOutputStream; +import java.nio.charset.StandardCharsets; +import java.sql.Timestamp; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.UUID; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.MockedConstruction; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * IF YOU'RE SEEING WEIRD BEHAVIOR INVOLVING MOCKED OBJECTS: double-check the mockConstruction() + * call in setup(). You might need to update the methods being mocked. + *
+ * Tests to help define what the legacy S3 stream copier did. + *
+ * Does not verify SQL operations, as they're fairly transparent. + *
+ * A lot of this code is duplicated in other places (S3StreamCopierTest, S3CsvWriterTest, + * RedshiftStreamCopierTest). This is intentional, as eventually we'd like to delete the + * LegacyS3StreamCopier along with this file. + */ +public class LegacyS3StreamCopierTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(LegacyS3StreamCopierTest.class); + + private static final int PART_SIZE = 5; + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + private AmazonS3Client s3Client; + private JdbcDatabase db; + private SqlOperations sqlOperations; + private LegacyS3StreamCopier copier; + + private MockedConstruction streamTransferManagerMockedConstruction; + private List outputStreams; + + private List copyArguments; + + private record CopyArguments(JdbcDatabase database, + String s3FileLocation, + String schema, + String tableName, + S3DestinationConfig s3Config) { + + } + + @BeforeEach + public void setup() { + s3Client = mock(AmazonS3Client.class); + db = mock(JdbcDatabase.class); + sqlOperations = mock(SqlOperations.class); + + outputStreams = new ArrayList<>(); + copyArguments = new ArrayList<>(); + + // This is basically RETURNS_SELF, except with getMultiPartOutputStreams configured correctly. + // Other non-void methods (e.g. toString()) will return null. + streamTransferManagerMockedConstruction = mockConstruction( + StreamTransferManager.class, + (mock, context) -> { + doReturn(mock).when(mock).numUploadThreads(anyInt()); + doReturn(mock).when(mock).queueCapacity(anyInt()); + doReturn(mock).when(mock).partSize(anyLong()); + + // We can't write a fake MultiPartOutputStream, because it doesn't have a public constructor. + // So instead, we'll build a mock that captures its data into a ByteArrayOutputStream. + final MultiPartOutputStream stream = mock(MultiPartOutputStream.class); + doReturn(singletonList(stream)).when(mock).getMultiPartOutputStreams(); + final ByteArrayOutputStream capturer = new ByteArrayOutputStream(); + outputStreams.add(capturer); + doAnswer(invocation -> { + capturer.write((int) invocation.getArgument(0)); + return null; + }).when(stream).write(anyInt()); + doAnswer(invocation -> { + capturer.write(invocation.getArgument(0)); + return null; + }).when(stream).write(any(byte[].class)); + doAnswer(invocation -> { + capturer.write(invocation.getArgument(0), invocation.getArgument(1), invocation.getArgument(2)); + return null; + }).when(stream).write(any(byte[].class), anyInt(), anyInt()); + }); + + copier = new LegacyS3StreamCopier( + // In reality, this is normally a UUID - see CopyConsumerFactory#createWriteConfigs + "fake-staging-folder", + DestinationSyncMode.OVERWRITE, + "fake-schema", + "fake-stream", + s3Client, + db, + new S3DestinationConfig( + "fake-endpoint", + "fake-bucket", + null, + "fake-region", + "fake-access-key-id", + "fake-secret-access-key", + PART_SIZE, + null), + new ExtendedNameTransformer(), + sqlOperations) { + + @Override + public void copyS3CsvFileIntoTable( + final JdbcDatabase database, + final String s3FileLocation, + final String schema, + final String tableName, + final S3DestinationConfig s3Config) { + copyArguments.add(new CopyArguments(database, s3FileLocation, schema, tableName, s3Config)); + } + + }; + } + + @AfterEach + public void teardown() { + streamTransferManagerMockedConstruction.close(); + } + + @Test + public void createSequentialStagingFiles_when_multipleFilesRequested() { + // When we call prepareStagingFile() the first time, it should create exactly one upload manager. 
+ // The next (MAX_PARTS_PER_FILE - 1) invocations should reuse that same upload manager. + for (var i = 0; i < LegacyS3StreamCopier.MAX_PARTS_PER_FILE; i++) { + final String file = copier.prepareStagingFile(); + assertEquals("fake-staging-folder/fake-schema/fake-stream_00000", file, "preparing file number " + i); + final List firstManagers = streamTransferManagerMockedConstruction.constructed(); + final StreamTransferManager firstManager = firstManagers.get(0); + verify(firstManager).partSize(PART_SIZE); + assertEquals(1, firstManagers.size()); + } + + // Now that we've hit the MAX_PARTS_PER_FILE, we should start a new upload + final String secondFile = copier.prepareStagingFile(); + assertEquals("fake-staging-folder/fake-schema/fake-stream_00001", secondFile); + final List secondManagers = streamTransferManagerMockedConstruction.constructed(); + final StreamTransferManager secondManager = secondManagers.get(1); + verify(secondManager).partSize(PART_SIZE); + assertEquals(2, secondManagers.size()); + } + + @Test + public void closesS3Upload_when_stagingUploaderClosedSuccessfully() throws Exception { + copier.prepareStagingFile(); + + copier.closeStagingUploader(false); + + final List managers = streamTransferManagerMockedConstruction.constructed(); + final StreamTransferManager manager = managers.get(0); + verify(manager).complete(); + } + + @Test + public void closesS3Upload_when_stagingUploaderClosedFailingly() throws Exception { + copier.prepareStagingFile(); + + copier.closeStagingUploader(true); + + final List managers = streamTransferManagerMockedConstruction.constructed(); + final StreamTransferManager manager = managers.get(0); + verify(manager).abort(); + } + + @Test + public void deletesStagingFiles() throws Exception { + final String file = copier.prepareStagingFile(); + doReturn(true).when(s3Client).doesObjectExist("fake-bucket", file); + + copier.removeFileAndDropTmpTable(); + + verify(s3Client).deleteObject("fake-bucket", file); + } + + @Test + public void writesContentsCorrectly() throws Exception { + final String file1 = copier.prepareStagingFile(); + for (int i = 0; i < LegacyS3StreamCopier.MAX_PARTS_PER_FILE - 1; i++) { + copier.prepareStagingFile(); + } + copier.write( + UUID.fromString("f6767f7d-ce1e-45cc-92db-2ad3dfdd088e"), + new AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"foo\": 73}")) + .withEmittedAt(1234L), + file1); + copier.write( + UUID.fromString("2b95a13f-d54f-4370-a712-1c7bf2716190"), + new AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"bar\": 84}")) + .withEmittedAt(2345L), + file1); + + final String file2 = copier.prepareStagingFile(); + copier.write( + UUID.fromString("24eba873-de57-4901-9e1e-2393334320fb"), + new AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"asd\": 95}")) + .withEmittedAt(3456L), + file2); + + copier.closeStagingUploader(false); + + // carriage returns are required b/c RFC4180 requires it :( + assertEquals( + String.format( + """ + f6767f7d-ce1e-45cc-92db-2ad3dfdd088e,"{""foo"":73}",%s\r + 2b95a13f-d54f-4370-a712-1c7bf2716190,"{""bar"":84}",%s\r + """, + Timestamp.from(Instant.ofEpochMilli(1234)), + Timestamp.from(Instant.ofEpochMilli(2345))), + outputStreams.get(0).toString(StandardCharsets.UTF_8)); + assertEquals( + String.format( + "24eba873-de57-4901-9e1e-2393334320fb,\"{\"\"asd\"\":95}\",%s\r\n", + Timestamp.from(Instant.ofEpochMilli(3456))), + outputStreams.get(1).toString(StandardCharsets.UTF_8)); + } + + @Test + public void copiesCorrectFilesToTable() throws Exception { + // 
Generate two files + for (int i = 0; i < LegacyS3StreamCopier.MAX_PARTS_PER_FILE + 1; i++) { + copier.prepareStagingFile(); + } + + copier.copyStagingFileToTemporaryTable(); + + assertEquals(2, copyArguments.size(), "Number of invocations was actually " + copyArguments.size() + ". Arguments were " + copyArguments); + + // S3StreamCopier operates on these from a HashMap, so need to sort them in order to assert in a + // sane way. + final List sortedArgs = copyArguments.stream().sorted(Comparator.comparing(arg -> arg.s3FileLocation)).toList(); + for (int i = 0; i < sortedArgs.size(); i++) { + LOGGER.info("Checking arguments for index {}", i); + final CopyArguments args = sortedArgs.get(i); + assertEquals(String.format("s3://fake-bucket/fake-staging-folder/fake-schema/fake-stream_%05d", i), args.s3FileLocation); + assertEquals("fake-schema", args.schema); + assertTrue(args.tableName.endsWith("fake_stream"), "Table name was actually " + args.tableName); + } + } + +} diff --git a/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3CopyConfigTest.java b/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3CopyConfigTest.java new file mode 100644 index 000000000000..4622a985ff11 --- /dev/null +++ b/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3CopyConfigTest.java @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.jdbc.copy.s3; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import org.junit.jupiter.api.Test; + +public class S3CopyConfigTest { + + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + @Test + public void setsDefaultValues() throws IOException { + final boolean purgeStagingData = S3CopyConfig.shouldPurgeStagingData(OBJECT_MAPPER.readTree("{}")); + + assertTrue(purgeStagingData); + } + + @Test + public void parsesPurgeStagingDataCorrectly() throws IOException { + final boolean purgeStagingData = S3CopyConfig.shouldPurgeStagingData(OBJECT_MAPPER.readTree( + """ + { + "purge_staging_data": false + } + """)); + + assertFalse(purgeStagingData); + } + +} diff --git a/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopierTest.java b/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopierTest.java new file mode 100644 index 000000000000..2ddab245b7de --- /dev/null +++ b/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopierTest.java @@ -0,0 +1,282 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.jdbc.copy.s3; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.mockConstruction; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; + +import com.amazonaws.services.s3.AmazonS3Client; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.integrations.destination.ExtendedNameTransformer; +import io.airbyte.integrations.destination.jdbc.SqlOperations; +import io.airbyte.integrations.destination.s3.S3DestinationConfig; +import io.airbyte.integrations.destination.s3.csv.CsvSheetGenerator; +import io.airbyte.integrations.destination.s3.csv.S3CsvFormatConfig; +import io.airbyte.integrations.destination.s3.csv.S3CsvWriter; +import io.airbyte.integrations.destination.s3.csv.StagingDatabaseCsvSheetGenerator; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.DestinationSyncMode; +import java.sql.Timestamp; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import org.apache.commons.csv.CSVFormat; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.MockedConstruction; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class S3StreamCopierTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(S3StreamCopierTest.class); + + private static final int PART_SIZE = 5; + private static final S3DestinationConfig S3_CONFIG = new S3DestinationConfig( + "fake-endpoint", + "fake-bucket", + "fake-bucketPath", + "fake-region", + "fake-access-key-id", + "fake-secret-access-key", + PART_SIZE, + null); + private static final ConfiguredAirbyteStream CONFIGURED_STREAM = new ConfiguredAirbyteStream() + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(new AirbyteStream() + .withName("fake-stream") + .withNamespace("fake-namespace")); + private static final int UPLOAD_THREADS = 10; + private static final int QUEUE_CAPACITY = 10; + // equivalent to Thu, 09 Dec 2021 19:17:54 GMT + private static final Timestamp UPLOAD_TIME = Timestamp.from(Instant.ofEpochMilli(1639077474000L)); + private static final int MAX_PARTS_PER_FILE = 42; + + private AmazonS3Client s3Client; + private JdbcDatabase db; + private SqlOperations sqlOperations; + private S3StreamCopier copier; + + private MockedConstruction csvWriterMockedConstruction; + private List csvWriterConstructorArguments; + + private List copyArguments; + + private record S3CsvWriterArguments(S3DestinationConfig config, + ConfiguredAirbyteStream stream, + Timestamp uploadTime, + int uploadThreads, + int queueCapacity, + boolean writeHeader, + CSVFormat csvSettings, + CsvSheetGenerator csvSheetGenerator) { + + } + + private record CopyArguments(JdbcDatabase database, + String s3FileLocation, + String schema, + String tableName, + S3DestinationConfig s3Config) { + + } + + @BeforeEach + public void setup() { + s3Client = mock(AmazonS3Client.class); + db = mock(JdbcDatabase.class); + sqlOperations = mock(SqlOperations.class); + + csvWriterConstructorArguments = new ArrayList<>(); + copyArguments = new ArrayList<>(); + + // This is basically 
a default mock, except with getOutputPath configured correctly.
+    // Other non-void methods (e.g. toString()) will return null.
+    csvWriterMockedConstruction = mockConstruction(
+        S3CsvWriter.class,
+        (mock, context) -> {
+          // Normally, the S3CsvWriter would return a path that ends in a UUID, but this mock will generate an
+          // int ID to make our asserts easier.
+          doReturn(String.format("fakeOutputPath-%05d", csvWriterConstructorArguments.size())).when(mock).getOutputPath();
+
+          // Mockito doesn't seem to provide an easy way to actually retrieve these arguments later on, so
+          // manually store them on construction.
+          // _PowerMockito_ does, but I didn't want to set up that additional dependency.
+          final List<Object> arguments = context.arguments();
+          csvWriterConstructorArguments.add(new S3CsvWriterArguments(
+              (S3DestinationConfig) arguments.get(0),
+              (ConfiguredAirbyteStream) arguments.get(2),
+              (Timestamp) arguments.get(3),
+              (int) arguments.get(4),
+              (int) arguments.get(5),
+              (boolean) arguments.get(6),
+              (CSVFormat) arguments.get(7),
+              (CsvSheetGenerator) arguments.get(8)));
+        });
+
+    copier = new S3StreamCopier(
+        // In reality, this is normally a UUID - see CopyConsumerFactory#createWriteConfigs
+        "fake-staging-folder",
+        "fake-schema",
+        s3Client,
+        db,
+        new S3CopyConfig(true, S3_CONFIG),
+        new ExtendedNameTransformer(),
+        sqlOperations,
+        CONFIGURED_STREAM,
+        UPLOAD_TIME,
+        MAX_PARTS_PER_FILE) {
+
+      @Override
+      public void copyS3CsvFileIntoTable(
+          final JdbcDatabase database,
+          final String s3FileLocation,
+          final String schema,
+          final String tableName,
+          final S3DestinationConfig s3Config) {
+        copyArguments.add(new CopyArguments(database, s3FileLocation, schema, tableName, s3Config));
+      }
+
+    };
+  }
+
+  @AfterEach
+  public void teardown() {
+    csvWriterMockedConstruction.close();
+  }
+
+  @Test
+  public void createSequentialStagingFiles_when_multipleFilesRequested() {
+    // When we call prepareStagingFile() the first time, it should create exactly one S3CsvWriter. The
+    // next (MAX_PARTS_PER_FILE - 1) invocations
+    // should reuse that same writer.
+ for (var i = 0; i < MAX_PARTS_PER_FILE; i++) { + final String file = copier.prepareStagingFile(); + assertEquals("fakeOutputPath-00000", file, "preparing file number " + i); + assertEquals(1, csvWriterMockedConstruction.constructed().size()); + checkCsvWriterArgs(csvWriterConstructorArguments.get(0)); + } + + // Now that we've hit the MAX_PARTS_PER_FILE, we should start a new writer + final String secondFile = copier.prepareStagingFile(); + assertEquals("fakeOutputPath-00001", secondFile); + final List secondManagers = csvWriterMockedConstruction.constructed(); + assertEquals(2, secondManagers.size()); + checkCsvWriterArgs(csvWriterConstructorArguments.get(1)); + } + + private void checkCsvWriterArgs(final S3CsvWriterArguments args) { + assertEquals(S3_CONFIG.cloneWithFormatConfig(new S3CsvFormatConfig(null, (long) PART_SIZE)), args.config); + assertEquals(CONFIGURED_STREAM, args.stream); + assertEquals(UPLOAD_TIME, args.uploadTime); + assertEquals(UPLOAD_THREADS, args.uploadThreads); + assertEquals(QUEUE_CAPACITY, args.queueCapacity); + assertFalse(args.writeHeader); + assertEquals(CSVFormat.DEFAULT, args.csvSettings); + assertTrue( + args.csvSheetGenerator instanceof StagingDatabaseCsvSheetGenerator, + "Sheet generator was actually a " + args.csvSheetGenerator.getClass()); + } + + @Test + public void closesS3Upload_when_stagingUploaderClosedSuccessfully() throws Exception { + copier.prepareStagingFile(); + + copier.closeStagingUploader(false); + + final List managers = csvWriterMockedConstruction.constructed(); + final S3CsvWriter manager = managers.get(0); + verify(manager).close(false); + } + + @Test + public void closesS3Upload_when_stagingUploaderClosedFailingly() throws Exception { + copier.prepareStagingFile(); + + copier.closeStagingUploader(true); + + final List managers = csvWriterMockedConstruction.constructed(); + final S3CsvWriter manager = managers.get(0); + verify(manager).close(true); + } + + @Test + public void deletesStagingFiles() throws Exception { + copier.prepareStagingFile(); + doReturn(true).when(s3Client).doesObjectExist("fake-bucket", "fakeOutputPath-00000"); + + copier.removeFileAndDropTmpTable(); + + verify(s3Client).deleteObject("fake-bucket", "fakeOutputPath-00000"); + } + + @Test + public void doesNotDeleteStagingFiles_if_purgeStagingDataDisabled() throws Exception { + copier = new S3StreamCopier( + "fake-staging-folder", + "fake-schema", + s3Client, + db, + // Explicitly disable purgeStagingData + new S3CopyConfig(false, S3_CONFIG), + new ExtendedNameTransformer(), + sqlOperations, + CONFIGURED_STREAM, + UPLOAD_TIME, + MAX_PARTS_PER_FILE) { + + @Override + public void copyS3CsvFileIntoTable( + final JdbcDatabase database, + final String s3FileLocation, + final String schema, + final String tableName, + final S3DestinationConfig s3Config) { + copyArguments.add(new CopyArguments(database, s3FileLocation, schema, tableName, s3Config)); + } + + }; + + copier.prepareStagingFile(); + doReturn(true).when(s3Client).doesObjectExist("fake-bucket", "fakeOutputPath-00000"); + + copier.removeFileAndDropTmpTable(); + + verify(s3Client, never()).deleteObject("fake-bucket", "fakeOutputPath-00000"); + } + + @Test + public void copiesCorrectFilesToTable() throws Exception { + // Generate two files + for (int i = 0; i < MAX_PARTS_PER_FILE + 1; i++) { + copier.prepareStagingFile(); + } + + copier.copyStagingFileToTemporaryTable(); + + assertEquals(2, copyArguments.size(), "Number of invocations was actually " + copyArguments.size() + ". 
Arguments were " + copyArguments); + + // S3StreamCopier operates on these from a HashMap, so need to sort them in order to assert in a + // sane way. + final List sortedArgs = copyArguments.stream().sorted(Comparator.comparing(arg -> arg.s3FileLocation)).toList(); + for (int i = 0; i < sortedArgs.size(); i++) { + LOGGER.info("Checking arguments for index {}", i); + final CopyArguments args = sortedArgs.get(i); + assertEquals(String.format("s3://fake-bucket/fakeOutputPath-%05d", i), args.s3FileLocation); + assertEquals("fake-schema", args.schema); + assertTrue(args.tableName.endsWith("fake_stream"), "Table name was actually " + args.tableName); + } + } + +} diff --git a/airbyte-integrations/connectors/destination-kafka/Dockerfile b/airbyte-integrations/connectors/destination-kafka/Dockerfile index de08194ffbeb..110dfa259902 100644 --- a/airbyte-integrations/connectors/destination-kafka/Dockerfile +++ b/airbyte-integrations/connectors/destination-kafka/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-kafka -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/destination-kafka diff --git a/airbyte-integrations/connectors/destination-keen/Dockerfile b/airbyte-integrations/connectors/destination-keen/Dockerfile index c0d05380fc96..5d75f6c758de 100644 --- a/airbyte-integrations/connectors/destination-keen/Dockerfile +++ b/airbyte-integrations/connectors/destination-keen/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-keen -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.2.0 LABEL io.airbyte.name=airbyte/destination-keen diff --git a/airbyte-integrations/connectors/destination-kinesis/Dockerfile b/airbyte-integrations/connectors/destination-kinesis/Dockerfile index 09eb14c53642..cbcbd0654d20 100644 --- a/airbyte-integrations/connectors/destination-kinesis/Dockerfile +++ b/airbyte-integrations/connectors/destination-kinesis/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-kinesis -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.0 LABEL io.airbyte.name=airbyte/destination-kinesis diff --git a/airbyte-integrations/connectors/destination-local-json/Dockerfile b/airbyte-integrations/connectors/destination-local-json/Dockerfile index f355b2ef7b5c..b256dee6fd5a 100644 --- a/airbyte-integrations/connectors/destination-local-json/Dockerfile +++ b/airbyte-integrations/connectors/destination-local-json/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-local-json -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.2.8 LABEL io.airbyte.name=airbyte/destination-local-json diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/Dockerfile b/airbyte-integrations/connectors/destination-mariadb-columnstore/Dockerfile index e89afe7b9dfd..f8f9b323b0a6 100644 --- 
a/airbyte-integrations/connectors/destination-mariadb-columnstore/Dockerfile +++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-mariadb-columnstore -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.1 LABEL io.airbyte.name=airbyte/destination-mariadb-columnstore diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java index b972857faae8..cdf4a94498fb 100644 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java @@ -110,7 +110,7 @@ protected List resolveIdentifier(final String identifier) { @Override protected void setup(TestDestinationEnv testEnv) throws Exception { - DockerImageName mcsImage = DockerImageName.parse("mariadb/columnstore").asCompatibleSubstituteFor("mariadb"); + DockerImageName mcsImage = DockerImageName.parse("fengdi/columnstore:1.5.2").asCompatibleSubstituteFor("mariadb"); db = new MariaDBContainer(mcsImage); db.start(); diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshMariadbColumnstoreDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshMariadbColumnstoreDestinationAcceptanceTest.java index 9eea8015a5fd..907397a1efa9 100644 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshMariadbColumnstoreDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshMariadbColumnstoreDestinationAcceptanceTest.java @@ -127,7 +127,7 @@ protected void setup(TestDestinationEnv testEnv) throws Exception { } private void startAndInitJdbcContainer() throws Exception { - DockerImageName mcsImage = DockerImageName.parse("mariadb/columnstore").asCompatibleSubstituteFor("mariadb"); + DockerImageName mcsImage = DockerImageName.parse("fengdi/columnstore:1.5.2").asCompatibleSubstituteFor("mariadb"); db = new MariaDBContainer<>(mcsImage) .withNetwork(bastion.getNetWork()); db.start(); diff --git a/airbyte-integrations/connectors/destination-meilisearch/Dockerfile b/airbyte-integrations/connectors/destination-meilisearch/Dockerfile index cf8a9c7f2c9a..ab0050425add 100644 --- a/airbyte-integrations/connectors/destination-meilisearch/Dockerfile +++ b/airbyte-integrations/connectors/destination-meilisearch/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION 
destination-meilisearch -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.2.10 LABEL io.airbyte.name=airbyte/destination-meilisearch diff --git a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/Dockerfile index ab8f6a75f76f..db09de7d11a3 100644 --- a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-mongodb-strict-encrypt -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.0 LABEL io.airbyte.name=airbyte/destination-mongodb-strict-encrypt diff --git a/airbyte-integrations/connectors/destination-mongodb/Dockerfile b/airbyte-integrations/connectors/destination-mongodb/Dockerfile index 93554dbed4bc..f62f4e02ecc0 100644 --- a/airbyte-integrations/connectors/destination-mongodb/Dockerfile +++ b/airbyte-integrations/connectors/destination-mongodb/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-mongodb -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/destination-mongodb diff --git a/airbyte-integrations/connectors/destination-mqtt/Dockerfile b/airbyte-integrations/connectors/destination-mqtt/Dockerfile index 39410c2f65b8..f260631f53c3 100644 --- a/airbyte-integrations/connectors/destination-mqtt/Dockerfile +++ b/airbyte-integrations/connectors/destination-mqtt/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-mqtt -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.0 LABEL io.airbyte.name=airbyte/destination-mqtt diff --git a/airbyte-integrations/connectors/destination-mssql-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-mssql-strict-encrypt/Dockerfile index 13d6a856034c..41e181626ddb 100644 --- a/airbyte-integrations/connectors/destination-mssql-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-mssql-strict-encrypt/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-mssql-strict-encrypt -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/destination-mssql-strict-encrypt diff --git a/airbyte-integrations/connectors/destination-mssql/Dockerfile b/airbyte-integrations/connectors/destination-mssql/Dockerfile index bd8bbaec165b..fe0d0abe3ada 100644 --- a/airbyte-integrations/connectors/destination-mssql/Dockerfile +++ b/airbyte-integrations/connectors/destination-mssql/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-mssql -COPY build/distributions/${APPLICATION}*.tar 
${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.12 LABEL io.airbyte.name=airbyte/destination-mssql diff --git a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/Dockerfile index 319f9b10c8bc..ce29aeea3ba3 100644 --- a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-mysql-strict-encrypt -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/destination-mysql-strict-encrypt diff --git a/airbyte-integrations/connectors/destination-mysql/Dockerfile b/airbyte-integrations/connectors/destination-mysql/Dockerfile index de3e347e51c0..6c9d8b87d2ab 100644 --- a/airbyte-integrations/connectors/destination-mysql/Dockerfile +++ b/airbyte-integrations/connectors/destination-mysql/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-mysql -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.15 LABEL io.airbyte.name=airbyte/destination-mysql diff --git a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/Dockerfile index fde019832a00..fe4be6673d1c 100644 --- a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-oracle-strict-encrypt -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.1 LABEL io.airbyte.name=airbyte/destination-oracle-strict-encrypt diff --git a/airbyte-integrations/connectors/destination-oracle/Dockerfile b/airbyte-integrations/connectors/destination-oracle/Dockerfile index 3d27bb9924d0..80af3e644480 100644 --- a/airbyte-integrations/connectors/destination-oracle/Dockerfile +++ b/airbyte-integrations/connectors/destination-oracle/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-oracle -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.12 LABEL io.airbyte.name=airbyte/destination-oracle diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile index b4189bd8a7d9..fd55e7d26ef5 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-postgres-strict-encrypt -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar 
/airbyte LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/destination-postgres-strict-encrypt diff --git a/airbyte-integrations/connectors/destination-postgres/Dockerfile b/airbyte-integrations/connectors/destination-postgres/Dockerfile index 8274d7849c8b..add68f07ddf4 100644 --- a/airbyte-integrations/connectors/destination-postgres/Dockerfile +++ b/airbyte-integrations/connectors/destination-postgres/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-postgres -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.3.13 LABEL io.airbyte.name=airbyte/destination-postgres diff --git a/airbyte-integrations/connectors/destination-pubsub/Dockerfile b/airbyte-integrations/connectors/destination-pubsub/Dockerfile index e5d979ef2136..648d7331cc6a 100644 --- a/airbyte-integrations/connectors/destination-pubsub/Dockerfile +++ b/airbyte-integrations/connectors/destination-pubsub/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-pubsub -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.1 LABEL io.airbyte.name=airbyte/destination-pubsub diff --git a/airbyte-integrations/connectors/destination-pulsar/Dockerfile b/airbyte-integrations/connectors/destination-pulsar/Dockerfile index c5ffa3415f27..4a175f7f49e1 100644 --- a/airbyte-integrations/connectors/destination-pulsar/Dockerfile +++ b/airbyte-integrations/connectors/destination-pulsar/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-pulsar -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.0 LABEL io.airbyte.name=airbyte/destination-pulsar diff --git a/airbyte-integrations/connectors/destination-redis/Dockerfile b/airbyte-integrations/connectors/destination-redis/Dockerfile index 54a668581446..89a89e63faf6 100644 --- a/airbyte-integrations/connectors/destination-redis/Dockerfile +++ b/airbyte-integrations/connectors/destination-redis/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-redis -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.0 LABEL io.airbyte.name=airbyte/destination-redis diff --git a/airbyte-integrations/connectors/destination-redshift/Dockerfile b/airbyte-integrations/connectors/destination-redshift/Dockerfile index bb5be9d62c0a..e39cbb2d28fc 100644 --- a/airbyte-integrations/connectors/destination-redshift/Dockerfile +++ b/airbyte-integrations/connectors/destination-redshift/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION destination-redshift -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar +ADD build/distributions/${APPLICATION}*.tar /airbyte -RUN tar xf ${APPLICATION}.tar --strip-components=1 - -LABEL io.airbyte.version=0.3.21 +LABEL io.airbyte.version=0.3.23 LABEL io.airbyte.name=airbyte/destination-redshift diff --git a/airbyte-integrations/connectors/destination-redshift/build.gradle 
b/airbyte-integrations/connectors/destination-redshift/build.gradle index 237cc220373d..8f678f6c75c8 100644 --- a/airbyte-integrations/connectors/destination-redshift/build.gradle +++ b/airbyte-integrations/connectors/destination-redshift/build.gradle @@ -33,6 +33,7 @@ dependencies { testImplementation 'org.apache.commons:commons-text:1.9' testImplementation 'org.apache.commons:commons-lang3:3.11' testImplementation 'org.apache.commons:commons-dbcp2:2.7.0' + testImplementation "org.mockito:mockito-inline:4.1.0" integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-redshift') diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftCopyS3Destination.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftCopyS3Destination.java index 1220ecfc1988..1b5249d0ed7f 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftCopyS3Destination.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftCopyS3Destination.java @@ -13,6 +13,7 @@ import io.airbyte.integrations.destination.jdbc.SqlOperations; import io.airbyte.integrations.destination.jdbc.copy.CopyConsumerFactory; import io.airbyte.integrations.destination.jdbc.copy.CopyDestination; +import io.airbyte.integrations.destination.jdbc.copy.s3.S3CopyConfig; import io.airbyte.integrations.destination.s3.S3Destination; import io.airbyte.integrations.destination.s3.S3DestinationConfig; import io.airbyte.protocol.models.AirbyteMessage; @@ -42,7 +43,7 @@ public AirbyteMessageConsumer getConsumer(final JsonNode config, getDatabase(config), getSqlOperations(), getNameTransformer(), - getS3DestinationConfig(config), + new S3CopyConfig(S3CopyConfig.shouldPurgeStagingData(config), getS3DestinationConfig(config)), catalog, new RedshiftStreamCopierFactory(), getConfiguredSchema(config)); diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopier.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopier.java index 41a450389d00..c296ddf1d126 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopier.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopier.java @@ -6,16 +6,19 @@ import com.amazonaws.services.s3.AmazonS3; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.annotations.VisibleForTesting; import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.string.Strings; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.destination.jdbc.SqlOperations; +import io.airbyte.integrations.destination.jdbc.copy.s3.S3CopyConfig; import io.airbyte.integrations.destination.jdbc.copy.s3.S3StreamCopier; import io.airbyte.integrations.destination.redshift.manifest.Entry; import io.airbyte.integrations.destination.redshift.manifest.Manifest; import 
io.airbyte.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.protocol.models.DestinationSyncMode; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import java.sql.Timestamp; +import java.time.Instant; import java.util.Optional; import java.util.UUID; import java.util.stream.Collectors; @@ -25,22 +28,54 @@ public class RedshiftStreamCopier extends S3StreamCopier { private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftStreamCopier.class); - private static final int FILE_PREFIX_LENGTH = 5; + // From https://docs.aws.amazon.com/redshift/latest/dg/t_loading-tables-from-s3.html + // "Split your load data files so that the files are about equal size, between 1 MB and 1 GB after + // compression" + public static final int MAX_PARTS_PER_FILE = 4; private final ObjectMapper objectMapper; private String manifestFilePath = null; public RedshiftStreamCopier(final String stagingFolder, - final DestinationSyncMode destSyncMode, final String schema, - final String streamName, final AmazonS3 client, final JdbcDatabase db, - final S3DestinationConfig s3Config, + final S3CopyConfig config, final ExtendedNameTransformer nameTransformer, - final SqlOperations sqlOperations) { - super(stagingFolder, destSyncMode, schema, streamName, Strings.addRandomSuffix("", "", FILE_PREFIX_LENGTH) + "_" + streamName, - client, db, s3Config, nameTransformer, sqlOperations); + final SqlOperations sqlOperations, + final ConfiguredAirbyteStream configuredAirbyteStream) { + this( + stagingFolder, + schema, + client, + db, + config, + nameTransformer, + sqlOperations, + Timestamp.from(Instant.now()), + configuredAirbyteStream); + } + + @VisibleForTesting + RedshiftStreamCopier(final String stagingFolder, + final String schema, + final AmazonS3 client, + final JdbcDatabase db, + final S3CopyConfig config, + final ExtendedNameTransformer nameTransformer, + final SqlOperations sqlOperations, + final Timestamp uploadTime, + final ConfiguredAirbyteStream configuredAirbyteStream) { + super(stagingFolder, + schema, + client, + db, + config, + nameTransformer, + sqlOperations, + configuredAirbyteStream, + uploadTime, + MAX_PARTS_PER_FILE); objectMapper = new ObjectMapper(); } @@ -55,8 +90,7 @@ public void copyStagingFileToTemporaryTable() { } @Override - public void copyS3CsvFileIntoTable( - final JdbcDatabase database, + public void copyS3CsvFileIntoTable(final JdbcDatabase database, final String s3FileLocation, final String schema, final String tableName, @@ -76,6 +110,11 @@ public void removeFileAndDropTmpTable() throws Exception { } } + @VisibleForTesting + String getTmpTableName() { + return tmpTableName; + } + /** * Creates the contents of a manifest file given the `s3StagingFiles`. There must be at least one * entry in a manifest file otherwise it is not considered valid for the COPY command. 
@@ -83,11 +122,11 @@ public void removeFileAndDropTmpTable() throws Exception { * @return null if no stagingFiles exist otherwise the manifest body String */ private String createManifest() { - if (s3StagingFiles.isEmpty()) { + if (stagingWritersByFile.isEmpty()) { return null; } - final var s3FileEntries = s3StagingFiles.stream() + final var s3FileEntries = stagingWritersByFile.keySet().stream() .map(filePath -> new Entry(getFullS3Path(s3Config.getBucketName(), filePath))) .collect(Collectors.toList()); final var manifest = new Manifest(s3FileEntries); @@ -103,7 +142,7 @@ private String createManifest() { */ private String putManifest(final String manifestContents) { manifestFilePath = - String.join("/", stagingFolder, schemaName, String.format("%s.manifest", UUID.randomUUID())); + String.join("/", s3Config.getBucketPath(), stagingFolder, schemaName, String.format("%s.manifest", UUID.randomUUID())); s3Client.putObject(s3Config.getBucketName(), manifestFilePath, manifestContents); diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierFactory.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierFactory.java index 2f24bdae1fe2..9876f03800b2 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierFactory.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierFactory.java @@ -9,24 +9,25 @@ import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.destination.jdbc.SqlOperations; import io.airbyte.integrations.destination.jdbc.copy.StreamCopier; +import io.airbyte.integrations.destination.jdbc.copy.s3.S3CopyConfig; import io.airbyte.integrations.destination.jdbc.copy.s3.S3StreamCopierFactory; -import io.airbyte.integrations.destination.s3.S3DestinationConfig; -import io.airbyte.protocol.models.DestinationSyncMode; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +/** + * Very similar to the {@link S3StreamCopierFactory}, but we need some additional + */ public class RedshiftStreamCopierFactory extends S3StreamCopierFactory { @Override public StreamCopier create(final String stagingFolder, - final DestinationSyncMode syncMode, final String schema, - final String streamName, final AmazonS3 s3Client, final JdbcDatabase db, - final S3DestinationConfig s3Config, + final S3CopyConfig config, final ExtendedNameTransformer nameTransformer, - final SqlOperations sqlOperations) - throws Exception { - return new RedshiftStreamCopier(stagingFolder, syncMode, schema, streamName, s3Client, db, s3Config, nameTransformer, sqlOperations); + final SqlOperations sqlOperations, + final ConfiguredAirbyteStream configuredStream) { + return new RedshiftStreamCopier(stagingFolder, schema, s3Client, db, config, nameTransformer, sqlOperations, configuredStream); } } diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json index cade59598ca9..360372f2ca89 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json @@ -54,6 +54,12 @@ "description": "The name of the staging S3 bucket 
to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.", "examples": ["airbyte.staging"] }, + "s3_bucket_path": { + "title": "S3 Bucket Path", + "type": "string", + "description": "The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory.", + "examples": ["data_sync/test"] + }, "s3_bucket_region": { "title": "S3 Bucket Region", "type": "string", @@ -105,6 +111,12 @@ "examples": ["10"], "description": "Optional. Increase this if syncing tables larger than 100GB. Only relevant for COPY. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default limit of 100GB tables. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care.", "title": "Stream Part Size" + }, + "purge_staging_data": { + "title": "Purge Staging Files and Tables", + "type": "boolean", + "description": "Whether to delete the staging files from S3 after completing the sync. See the docs for details. Only relevant for COPY. Defaults to true.", + "default": true } } } diff --git a/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierTest.java b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierTest.java new file mode 100644 index 000000000000..3f2b44536be6 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierTest.java @@ -0,0 +1,159 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.redshift; + +import static java.util.Comparator.comparing; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +import com.amazonaws.services.s3.AmazonS3Client; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Lists; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.integrations.destination.ExtendedNameTransformer; +import io.airbyte.integrations.destination.jdbc.SqlOperations; +import io.airbyte.integrations.destination.jdbc.copy.s3.S3CopyConfig; +import io.airbyte.integrations.destination.s3.S3DestinationConfig; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.DestinationSyncMode; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.time.Instant; +import java.util.List; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicReference; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +class RedshiftStreamCopierTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftStreamCopierTest.class); + + private static final int PART_SIZE = 5; + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + // The full path would be something like + // "fake-namespace/fake_stream/2021_12_09_1639077474000_e549e712-b89c-4272-9496-9690ba7f973e.csv" + // The namespace and stream have their hyphens replaced by underscores. Not super clear that that's + // actually required. + // 2021_12_09_1639077474000 is generated from the timestamp. It's followed by a random UUID, in case + // we need to create multiple files. 
+ private static final String EXPECTED_OBJECT_BEGINNING = "fake-bucketPath/fake_namespace/fake_stream/2021_12_09_1639077474000_"; + private static final String EXPECTED_OBJECT_ENDING = ".csv"; + + // equivalent to Thu, 09 Dec 2021 19:17:54 GMT + private static final Timestamp UPLOAD_TIME = Timestamp.from(Instant.ofEpochMilli(1639077474000L)); + + private AmazonS3Client s3Client; + private JdbcDatabase db; + private SqlOperations sqlOperations; + private RedshiftStreamCopier copier; + + @BeforeEach + public void setup() { + s3Client = mock(AmazonS3Client.class, RETURNS_DEEP_STUBS); + db = mock(JdbcDatabase.class); + sqlOperations = mock(SqlOperations.class); + + copier = new RedshiftStreamCopier( + // In reality, this is normally a UUID - see CopyConsumerFactory#createWriteConfigs + "fake-staging-folder", + "fake-schema", + s3Client, + db, + new S3CopyConfig( + true, + new S3DestinationConfig( + "fake-endpoint", + "fake-bucket", + "fake-bucketPath", + "fake-region", + "fake-access-key-id", + "fake-secret-access-key", + PART_SIZE, + null)), + new ExtendedNameTransformer(), + sqlOperations, + UPLOAD_TIME, + new ConfiguredAirbyteStream() + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(new AirbyteStream() + .withName("fake-stream") + .withNamespace("fake-namespace"))); + } + + @Test + public void copiesCorrectFilesToTable() throws SQLException { + // Generate two files + final String file1 = copier.prepareStagingFile(); + for (int i = 0; i < RedshiftStreamCopier.MAX_PARTS_PER_FILE - 1; i++) { + copier.prepareStagingFile(); + } + final String file2 = copier.prepareStagingFile(); + final List expectedFiles = List.of(file1, file2).stream().sorted().toList(); + + copier.copyStagingFileToTemporaryTable(); + + final AtomicReference manifestUuid = new AtomicReference<>(); + verify(s3Client).putObject( + eq("fake-bucket"), + argThat(path -> { + final boolean startsCorrectly = path.startsWith("fake-bucketPath/fake-staging-folder/fake-schema/"); + final boolean endsCorrectly = path.endsWith(".manifest"); + // Make sure that we have a valid UUID + manifestUuid.set(path.replaceFirst("^fake-bucketPath/fake-staging-folder/fake-schema/", "").replaceFirst(".manifest$", "")); + UUID.fromString(manifestUuid.get()); + + return startsCorrectly && endsCorrectly; + }), + (String) argThat(manifestStr -> { + try { + final JsonNode manifest = OBJECT_MAPPER.readTree((String) manifestStr); + final List entries = Lists.newArrayList(manifest.get("entries").elements()).stream() + .sorted(comparing(entry -> entry.get("url").asText())).toList(); + + boolean entriesAreCorrect = true; + for (int i = 0; i < 2; i++) { + final String expectedFilename = expectedFiles.get(i); + final JsonNode manifestEntry = entries.get(i); + entriesAreCorrect &= isManifestEntryCorrect(manifestEntry, expectedFilename); + if (!entriesAreCorrect) { + LOGGER.error("Invalid entry: {}", manifestEntry); + } + } + + return entriesAreCorrect && entries.size() == 2; + } catch (final JsonProcessingException e) { + throw new RuntimeException(e); + } + })); + + verify(db).execute(String.format( + """ + COPY fake-schema.%s FROM 's3://fake-bucket/fake-bucketPath/fake-staging-folder/fake-schema/%s.manifest' + CREDENTIALS 'aws_access_key_id=fake-access-key-id;aws_secret_access_key=fake-secret-access-key' + CSV REGION 'fake-region' TIMEFORMAT 'auto' + STATUPDATE OFF + MANIFEST;""", + copier.getTmpTableName(), + manifestUuid.get())); + } + + private static boolean isManifestEntryCorrect(final JsonNode entry, final String expectedFilename) { + 
final String url = entry.get("url").asText(); + final boolean mandatory = entry.get("mandatory").asBoolean(); + + return ("s3://fake-bucket/" + expectedFilename).equals(url) && mandatory; + } + +} diff --git a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetDestination.java b/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetDestination.java index c7fc55385db0..32f338b337b7 100644 --- a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetDestination.java +++ b/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetDestination.java @@ -21,74 +21,70 @@ import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; - import java.util.Map; import java.util.UUID; import java.util.function.Consumer; - import org.apache.commons.lang3.RandomStringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class RocksetDestination extends BaseConnector implements Destination { - private static final Logger LOGGER = LoggerFactory.getLogger(RocksetDestination.class); - private static final ObjectMapper mapper = new ObjectMapper(); - - public static void main(String[] args) throws Exception { - new IntegrationRunner(new RocksetDestination()).run(args); - } - - @Override - public AirbyteConnectionStatus check(final JsonNode config) { - ApiClient client = null; - String workspace = null; - // Create a temporary table - final String cname = "tmp_test_airbyte_collection_" + RandomStringUtils.randomAlphabetic(7).toLowerCase(); - - try { - client = RocksetUtils.apiClientFromConfig(config); - - workspace = config.get(ROCKSET_WORKSPACE_ID).asText(); - RocksetUtils.createWorkspaceIfNotExists(client, workspace); - - - RocksetUtils.createCollectionIfNotExists(client, workspace, cname); - RocksetUtils.waitUntilCollectionReady(client, workspace, cname); - - // Write a single document - final String unique = UUID.randomUUID().toString(); - final Map dummyRecord = ImmutableMap.of("_id", unique); - final AddDocumentsRequest req = new AddDocumentsRequest(); - req.addDataItem(mapper.convertValue(dummyRecord, new TypeReference<>() { - })); - new DocumentsApi(client).add(workspace, cname, req); - - // Verify that the doc shows up - final String sql = String.format("SELECT * FROM %s.%s WHERE _id = '%s';", workspace, cname, unique); - RocksetUtils.waitUntilDocCount(client, sql, 1); - - LOGGER.info("Check succeeded"); - return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); - } catch (Exception e) { - LOGGER.info("Check failed.", e); - return new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage(e.getMessage() != null ? 
e.getMessage() : e.toString()); - } finally { - // Delete the collection - if (client != null && workspace != null) { - RocksetUtils.deleteCollectionIfExists(client, workspace, cname); - } - - } - } + private static final Logger LOGGER = LoggerFactory.getLogger(RocksetDestination.class); + private static final ObjectMapper mapper = new ObjectMapper(); + + public static void main(String[] args) throws Exception { + new IntegrationRunner(new RocksetDestination()).run(args); + } + + @Override + public AirbyteConnectionStatus check(final JsonNode config) { + ApiClient client = null; + String workspace = null; + // Create a temporary table + final String cname = "tmp_test_airbyte_collection_" + RandomStringUtils.randomAlphabetic(7).toLowerCase(); + + try { + client = RocksetUtils.apiClientFromConfig(config); + + workspace = config.get(ROCKSET_WORKSPACE_ID).asText(); + RocksetUtils.createWorkspaceIfNotExists(client, workspace); + + RocksetUtils.createCollectionIfNotExists(client, workspace, cname); + RocksetUtils.waitUntilCollectionReady(client, workspace, cname); + + // Write a single document + final String unique = UUID.randomUUID().toString(); + final Map dummyRecord = ImmutableMap.of("_id", unique); + final AddDocumentsRequest req = new AddDocumentsRequest(); + req.addDataItem(mapper.convertValue(dummyRecord, new TypeReference<>() {})); + new DocumentsApi(client).add(workspace, cname, req); + + // Verify that the doc shows up + final String sql = String.format("SELECT * FROM %s.%s WHERE _id = '%s';", workspace, cname, unique); + RocksetUtils.waitUntilDocCount(client, sql, 1); + + LOGGER.info("Check succeeded"); + return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); + } catch (Exception e) { + LOGGER.info("Check failed.", e); + return new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage(e.getMessage() != null ? 
e.getMessage() : e.toString()); + } finally { + // Delete the collection + if (client != null && workspace != null) { + RocksetUtils.deleteCollectionIfExists(client, workspace, cname); + } - @Override - public AirbyteMessageConsumer getConsumer( - JsonNode config, - ConfiguredAirbyteCatalog catalog, - Consumer outputRecordCollector) - throws Exception { - return new RocksetWriteApiConsumer(config, catalog, outputRecordCollector); } + } + + @Override + public AirbyteMessageConsumer getConsumer( + JsonNode config, + ConfiguredAirbyteCatalog catalog, + Consumer outputRecordCollector) + throws Exception { + return new RocksetWriteApiConsumer(config, catalog, outputRecordCollector); + } } diff --git a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetUtils.java b/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetUtils.java index e392da1634fd..9ec1dab6a850 100644 --- a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetUtils.java +++ b/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetUtils.java @@ -26,245 +26,239 @@ import com.rockset.client.model.QueryRequestSql; import com.rockset.client.model.QueryResponse; import io.airbyte.commons.lang.Exceptions; -import org.awaitility.Awaitility; -import org.awaitility.Duration; -import org.awaitility.core.ConditionFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.nio.charset.Charset; import java.util.List; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import org.awaitility.Awaitility; +import org.awaitility.Duration; +import org.awaitility.core.ConditionFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class RocksetUtils { - private static final Logger LOGGER = LoggerFactory.getLogger(RocksetUtils.class); - - public static final String ROCKSET_WORKSPACE_ID = "workspace"; - public static final String API_KEY_ID = "api_key"; - public static final String API_SERVER_ID = "api_server"; - public static final Duration DEFAULT_TIMEOUT = new Duration(20, TimeUnit.MINUTES); - public static final Duration DEFAULT_POLL_INTERVAL = Duration.FIVE_SECONDS; - private static final java.time.Duration DEFAULT_HTTP_CLIENT_TIMEOUT = java.time.Duration.ofMinutes(1L); - private static final String DEFAULT_ROCKSET_CLIENT_VERSION = "0.9.0"; - - public static ApiClient apiClientFromConfig(JsonNode config) { - final String apiKey = config.get(API_KEY_ID).asText(); - final String apiServer = config.get(API_SERVER_ID).asText(); - return apiClient(apiKey, apiServer); + private static final Logger LOGGER = LoggerFactory.getLogger(RocksetUtils.class); + + public static final String ROCKSET_WORKSPACE_ID = "workspace"; + public static final String API_KEY_ID = "api_key"; + public static final String API_SERVER_ID = "api_server"; + public static final Duration DEFAULT_TIMEOUT = new Duration(20, TimeUnit.MINUTES); + public static final Duration DEFAULT_POLL_INTERVAL = Duration.FIVE_SECONDS; + private static final java.time.Duration DEFAULT_HTTP_CLIENT_TIMEOUT = java.time.Duration.ofMinutes(1L); + private static final String DEFAULT_ROCKSET_CLIENT_VERSION = "0.9.0"; + + public static ApiClient apiClientFromConfig(JsonNode config) { + final String apiKey = config.get(API_KEY_ID).asText(); + final String apiServer = 
config.get(API_SERVER_ID).asText(); + return apiClient(apiKey, apiServer); + } + + public static ApiClient apiClient(String apiKey, String apiServer) { + final ApiClient client = new ApiClient(); + + client.setReadTimeout((int) DEFAULT_HTTP_CLIENT_TIMEOUT.toMillis()) + .setConnectTimeout((int) DEFAULT_HTTP_CLIENT_TIMEOUT.toMillis()) + .setWriteTimeout((int) DEFAULT_HTTP_CLIENT_TIMEOUT.toMillis()); + + client.setApiKey(apiKey); + client.setApiServer(apiServer); + client.setVersion(DEFAULT_ROCKSET_CLIENT_VERSION); + return client; + } + + public static void createWorkspaceIfNotExists(ApiClient client, String workspace) { + final CreateWorkspaceRequest request = new CreateWorkspaceRequest().name(workspace); + + try { + new WorkspacesApi(client).create(request); + LOGGER.info(String.format("Created workspace %s", workspace)); + } catch (ApiException e) { + if (e.getCode() == 400 && e.getErrorModel().getType() == ErrorModel.TypeEnum.ALREADYEXISTS) { + LOGGER.info(String.format("Workspace %s already exists", workspace)); + return; + } + + throw new RuntimeException(e); + } catch (Exception e) { + throw new RuntimeException(e); } - - public static ApiClient apiClient(String apiKey, String apiServer) { - final ApiClient client = new ApiClient(); - - client.setReadTimeout((int) DEFAULT_HTTP_CLIENT_TIMEOUT.toMillis()) - .setConnectTimeout((int) DEFAULT_HTTP_CLIENT_TIMEOUT.toMillis()) - .setWriteTimeout((int) DEFAULT_HTTP_CLIENT_TIMEOUT.toMillis()); - - client.setApiKey(apiKey); - client.setApiServer(apiServer); - client.setVersion(DEFAULT_ROCKSET_CLIENT_VERSION); - return client; + } + + // Assumes the workspace exists + public static void createCollectionIfNotExists(ApiClient client, String workspace, String cname) { + final CreateCollectionRequest request = new CreateCollectionRequest().name(cname); + try { + new CollectionsApi(client).create(workspace, request); + LOGGER.info(String.format("Created collection %s.%s", workspace, cname)); + } catch (ApiException e) { + if (e.getCode() == 400 && e.getErrorModel().getType() == ErrorModel.TypeEnum.ALREADYEXISTS) { + LOGGER.info(String.format("Collection %s.%s already exists", workspace, cname)); + return; + } + throw new RuntimeException(e); + } catch (Exception e) { + throw new RuntimeException(e); } - - public static void createWorkspaceIfNotExists(ApiClient client, String workspace) { - final CreateWorkspaceRequest request = new CreateWorkspaceRequest().name(workspace); - - try { - new WorkspacesApi(client).create(request); - LOGGER.info(String.format("Created workspace %s", workspace)); - } catch (ApiException e) { - if (e.getCode() == 400 && e.getErrorModel().getType() == ErrorModel.TypeEnum.ALREADYEXISTS) { - LOGGER.info(String.format("Workspace %s already exists", workspace)); - return; - } - - throw new RuntimeException(e); - } catch (Exception e) { - throw new RuntimeException(e); - } + } + + // Assumes the collection exists + public static void deleteCollectionIfExists(ApiClient client, String workspace, String cname) { + try { + new CollectionsApi(client).delete(workspace, cname); + LOGGER.info(String.format("Deleted collection %s.%s", workspace, cname)); + } catch (ApiException e) { + if (e.getCode() == 404 && e.getErrorModel().getType() == ErrorModel.TypeEnum.NOTFOUND) { + LOGGER.info(String.format("Collection %s.%s does not exist", workspace, cname)); + return; + } + + throw new RuntimeException(e); + } catch (Exception e) { + throw new RuntimeException(e); } - - // Assumes the workspace exists - public static void 
createCollectionIfNotExists(ApiClient client, String workspace, String cname) { - final CreateCollectionRequest request = new CreateCollectionRequest().name(cname); - try { - new CollectionsApi(client).create(workspace, request); - LOGGER.info(String.format("Created collection %s.%s", workspace, cname)); - } catch (ApiException e) { - if (e.getCode() == 400 && e.getErrorModel().getType() == ErrorModel.TypeEnum.ALREADYEXISTS) { - LOGGER.info(String.format("Collection %s.%s already exists", workspace, cname)); - return; - } - throw new RuntimeException(e); - } catch (Exception e) { - throw new RuntimeException(e); - } + } + + // Assumes the collection exists + public static void waitUntilCollectionReady(ApiClient client, String workspace, String cname) { + pollingConfig(workspace, cname) + .until(() -> isCollectionReady(client, workspace, cname)); + + } + + private static boolean isCollectionReady(ApiClient client, String workspace, String cname) throws Exception { + final GetCollectionResponse resp = new CollectionsApi(client).get(workspace, cname); + final Collection.StatusEnum status = resp.getData().getStatus(); + if (status == Collection.StatusEnum.READY) { + LOGGER.info(String.format("Collection %s.%s is READY", workspace, cname)); + return true; + } else { + LOGGER.info( + String.format( + "Waiting until %s.%s is READY, it is %s", workspace, cname, status.toString())); + return false; } - - // Assumes the collection exists - public static void deleteCollectionIfExists(ApiClient client, String workspace, String cname) { - try { - new CollectionsApi(client).delete(workspace, cname); - LOGGER.info(String.format("Deleted collection %s.%s", workspace, cname)); - } catch (ApiException e) { - if (e.getCode() == 404 && e.getErrorModel().getType() == ErrorModel.TypeEnum.NOTFOUND) { - LOGGER.info(String.format("Collection %s.%s does not exist", workspace, cname)); - return; - } - - throw new RuntimeException(e); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - // Assumes the collection exists - public static void waitUntilCollectionReady(ApiClient client, String workspace, String cname) { - pollingConfig(workspace, cname) - .until(() -> - isCollectionReady(client, workspace, cname) - ); - - } - - private static boolean isCollectionReady(ApiClient client, String workspace, String cname) throws Exception { - final GetCollectionResponse resp = new CollectionsApi(client).get(workspace, cname); - final Collection.StatusEnum status = resp.getData().getStatus(); - if (status == Collection.StatusEnum.READY) { - LOGGER.info(String.format("Collection %s.%s is READY", workspace, cname)); - return true; - } else { - LOGGER.info( - String.format( - "Waiting until %s.%s is READY, it is %s", workspace, cname, status.toString())); - return false; - } + } + + // Assumes the collection exists + public static void waitUntilCollectionDeleted(ApiClient client, String workspace, String cname) { + pollingConfig(workspace, cname) + .until(() -> isCollectionDeleted(client, workspace, cname)); + + } + + private static boolean isCollectionDeleted(ApiClient client, String workspace, String cname) throws Exception { + try { + new CollectionsApi(client).get(workspace, cname); + LOGGER.info( + String.format( + "Collection %s.%s still exists, waiting for deletion to complete", + workspace, cname)); + } catch (ApiException e) { + if (e.getCode() == 404 && e.getErrorModel().getType() == ErrorModel.TypeEnum.NOTFOUND) { + LOGGER.info(String.format("Collection %s.%s does not exist", workspace, cname)); + 
return true; + } + + throw e; } - - // Assumes the collection exists - public static void waitUntilCollectionDeleted(ApiClient client, String workspace, String cname) { - pollingConfig(workspace, cname) - .until(() -> - isCollectionDeleted(client, workspace, cname) - ); - + return false; + } + + // Assumes the collection exists + public static void waitUntilDocCount(ApiClient client, String sql, int desiredCount) { + pollingConfig(sql) + .until(() -> queryMatchesCount(client, sql, desiredCount)); + } + + private static boolean queryMatchesCount(ApiClient client, String sql, int desiredCount) throws Exception { + LOGGER.info(String.format("Running query %s", sql)); + final QueryRequestSql qrs = new QueryRequestSql(); + qrs.setQuery(sql); + + final QueryRequest qr = new QueryRequest(); + qr.setSql(qrs); + + final QueryResponse response = new QueriesApi(client).query(qr); + final int resultCount = response.getResults().size(); + + if (resultCount == desiredCount) { + LOGGER.info(String.format("Desired result count %s found", desiredCount)); + return true; + } else { + LOGGER.info( + String.format( + "Waiting for desired result count %s, current is %s", desiredCount, resultCount)); + return false; } - - private static boolean isCollectionDeleted(ApiClient client, String workspace, String cname) throws Exception { - try { - new CollectionsApi(client).get(workspace, cname); - LOGGER.info( - String.format( - "Collection %s.%s still exists, waiting for deletion to complete", - workspace, cname)); - } catch (ApiException e) { - if (e.getCode() == 404 && e.getErrorModel().getType() == ErrorModel.TypeEnum.NOTFOUND) { - LOGGER.info(String.format("Collection %s.%s does not exist", workspace, cname)); - return true; - } - - throw e; + } + + private static boolean doesCollectionExist(ApiClient client, String workspace, String cname) throws Exception { + final ListCollectionsResponse collectionsResponse = new CollectionsApi(client).workspace(workspace); + return collectionsResponse + .getData() + .stream() + .anyMatch(coll -> coll.getName().equals(cname)); + } + + public static void clearCollectionIfCollectionExists(ApiClient client, String workspace, String cname) { + Exceptions.toRuntime(() -> { + + if (!doesCollectionExist(client, workspace, cname)) { + return; + } + + final QueryRequest qr = new QueryRequest().sql(new QueryRequestSql().query(String.format("SELECT _id from %s.%s", workspace, cname))); + try { + final QueryResponse resp = new QueriesApi(client).query(qr); + final List ids = + resp.getResults().stream().map(f -> (LinkedTreeMap) f).map(f -> (String) f.get("_id")).collect(Collectors.toList()); + final DeleteDocumentsRequest ddr = new DeleteDocumentsRequest(); + for (String id : ids) { + ddr.addDataItem(new DeleteDocumentsRequestData().id(id)); } - return false; - } - - // Assumes the collection exists - public static void waitUntilDocCount(ApiClient client, String sql, int desiredCount) { - pollingConfig(sql) - .until(() -> - queryMatchesCount(client, sql, desiredCount) - ); - } + LOGGER.info("Deleting documents from " + cname); + new DocumentsApi(client).delete(workspace, cname, ddr); + } catch (Exception e) { + LOGGER.error("Error while trying to clear a collection ", e); + } - private static boolean queryMatchesCount(ApiClient client, String sql, int desiredCount) throws Exception { - LOGGER.info(String.format("Running query %s", sql)); - final QueryRequestSql qrs = new QueryRequestSql(); - qrs.setQuery(sql); - - final QueryRequest qr = new QueryRequest(); - qr.setSql(qrs); - - final 
QueryResponse response = new QueriesApi(client).query(qr); - final int resultCount = response.getResults().size(); - - if (resultCount == desiredCount) { - LOGGER.info(String.format("Desired result count %s found", desiredCount)); - return true; - } else { - LOGGER.info( - String.format( - "Waiting for desired result count %s, current is %s", desiredCount, resultCount)); - return false; - } - } + pollingConfig(workspace, cname) + .until(() -> isCollectionEmpty(client, workspace, cname)); - private static boolean doesCollectionExist(ApiClient client, String workspace, String cname) throws Exception { - final ListCollectionsResponse collectionsResponse = new CollectionsApi(client).workspace(workspace); - return collectionsResponse - .getData() - .stream() - .anyMatch(coll -> coll.getName().equals(cname)); - } + }); + } - public static void clearCollectionIfCollectionExists(ApiClient client, String workspace, String cname) { - Exceptions.toRuntime(() -> { - - if (!doesCollectionExist(client, workspace, cname)) { - return; - } - - final QueryRequest qr = new QueryRequest().sql(new QueryRequestSql().query(String.format("SELECT _id from %s.%s", workspace, cname))); - try { - final QueryResponse resp = new QueriesApi(client).query(qr); - final List ids = resp.getResults().stream().map(f -> (LinkedTreeMap) f).map(f -> (String) f.get("_id")).collect(Collectors.toList()); - final DeleteDocumentsRequest ddr = new DeleteDocumentsRequest(); - for (String id : ids) { - ddr.addDataItem(new DeleteDocumentsRequestData().id(id)); - } - LOGGER.info("Deleting documents from " + cname); - new DocumentsApi(client).delete(workspace, cname, ddr); - } catch (Exception e) { - LOGGER.error("Error while trying to clear a collection ", e); - } - - pollingConfig(workspace, cname) - .until(() -> - isCollectionEmpty(client, workspace, cname) - ); - - }); - } + private static boolean isCollectionEmpty(ApiClient client, String workspace, String cname) { + return Exceptions.toRuntime(() -> { + final String elementCount = String.format("SELECT count(*) as numel from %s.%s", workspace, cname); - private static boolean isCollectionEmpty(ApiClient client, String workspace, String cname) { - return Exceptions.toRuntime(() -> { - final String elementCount = String.format("SELECT count(*) as numel from %s.%s", workspace, cname); + final QueryRequest qr = new QueryRequest().sql(new QueryRequestSql().query(elementCount)); + final QueryResponse resp = new QueriesApi(client).query(qr); + Optional count = + resp.getResults().stream().map(f -> (LinkedTreeMap) f).map(f -> f.get("numel")).map(f -> (Number) f).findFirst(); + return count.filter(number -> number.intValue() == 0).isPresent(); - final QueryRequest qr = new QueryRequest().sql(new QueryRequestSql().query(elementCount)); - final QueryResponse resp = new QueriesApi(client).query(qr); - Optional count = resp.getResults().stream().map(f -> (LinkedTreeMap) f).map(f -> f.get("numel")).map(f -> (Number) f).findFirst(); - return count.filter(number -> number.intValue() == 0).isPresent(); + }); - }); + } + private static Duration jitter(String... args) { + final Hasher hsh = Hashing.murmur3_32().newHasher(); + for (String s : args) { + hsh.putString(s, Charset.defaultCharset()); } - private static Duration jitter(String... 
args) { - final Hasher hsh = Hashing.murmur3_32().newHasher(); - for (String s : args) { - hsh.putString(s, Charset.defaultCharset()); - } + return new Duration(Math.abs(hsh.hash().asInt()) % DEFAULT_POLL_INTERVAL.getValueInMS(), TimeUnit.MILLISECONDS); - return new Duration(Math.abs(hsh.hash().asInt()) % DEFAULT_POLL_INTERVAL.getValueInMS(), TimeUnit.MILLISECONDS); + } - } + private static ConditionFactory pollingConfig(final String... args) { + return Awaitility.await() + .timeout(DEFAULT_TIMEOUT) + .pollDelay(jitter(args)) + .pollInterval(DEFAULT_POLL_INTERVAL); + } - private static ConditionFactory pollingConfig(final String... args) { - return Awaitility.await() - .timeout(DEFAULT_TIMEOUT) - .pollDelay(jitter(args)) - .pollInterval(DEFAULT_POLL_INTERVAL); - } } diff --git a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetWriteApiConsumer.java b/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetWriteApiConsumer.java index 6346aa1e1228..3822bf13e55a 100644 --- a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetWriteApiConsumer.java +++ b/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetWriteApiConsumer.java @@ -1,6 +1,7 @@ /* * Copyright (c) 2021 Airbyte, Inc., all rights reserved. */ + package io.airbyte.integrations.destination.rockset; import static io.airbyte.integrations.destination.rockset.RocksetUtils.*; @@ -18,7 +19,6 @@ import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.DestinationSyncMode; - import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.ArrayList; @@ -32,168 +32,163 @@ import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.stream.Collectors; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class RocksetWriteApiConsumer implements AirbyteMessageConsumer { - private static final Logger LOGGER = LoggerFactory.getLogger(RocksetWriteApiConsumer.class); - private static final ObjectMapper mapper = new ObjectMapper(); - // IO bound tasks, use cached thread pool - private final ExecutorService exec = Executors.newFixedThreadPool(5); - - private final ScheduledExecutorService schedExec = Executors.newSingleThreadScheduledExecutor(); - - private final String apiKey; - private final String apiServer; - private final String workspace; - - private final ConfiguredAirbyteCatalog catalog; - private final Consumer outputRecordCollector; - - // records to be sent per collection - private final Map> records; - private long lastSentDocumentMicroSeconds = 0L; - - private final RocksetSQLNameTransformer nameTransformer = new RocksetSQLNameTransformer(); - - private ApiClient client; - - public RocksetWriteApiConsumer( - JsonNode config, - ConfiguredAirbyteCatalog catalog, - Consumer outputRecordCollector) { - this.apiKey = config.get(API_KEY_ID).asText(); - this.apiServer = config.get(API_SERVER_ID).asText(); - this.workspace = config.get(ROCKSET_WORKSPACE_ID).asText(); - this.records = new HashMap<>(); - - this.catalog = catalog; - this.outputRecordCollector = outputRecordCollector; - } - - @Override - public void start() throws Exception { - this.client = RocksetUtils.apiClient(apiKey, apiServer); - LOGGER.info("Creating workspace"); - 
RocksetUtils.createWorkspaceIfNotExists(client, workspace); - - CompletableFuture[] overwrittenStreams = catalog.getStreams() - .stream() - .filter(s -> s.getDestinationSyncMode() == DestinationSyncMode.OVERWRITE) - .map(s -> s.getStream().getName()) - .map(nameTransformer::convertStreamName) - .map(this::emptyCollection) - .collect(Collectors.toList()) - .toArray(CompletableFuture[]::new); - - CompletableFuture[] appendStreams = catalog.getStreams().stream() - .filter(s -> s.getDestinationSyncMode() == DestinationSyncMode.APPEND) - .map(s -> s.getStream().getName()) - .map(nameTransformer::convertStreamName) - .map(this::createCollectionIntoReadyState) - .collect(Collectors.toList()) - .toArray(CompletableFuture[]::new); - - CompletableFuture initStreams = CompletableFuture.allOf( - CompletableFuture.allOf(overwrittenStreams), - CompletableFuture.allOf(appendStreams)); - - // Creating and readying many collections at once can be slow - initStreams.get(30, TimeUnit.MINUTES); - - // Schedule sending of records at a fixed rate - schedExec.scheduleAtFixedRate(this::sendBatches, 0L, 5L, TimeUnit.SECONDS); - } - - @Override - public void accept(AirbyteMessage message) throws Exception { - if (message.getType() == AirbyteMessage.Type.RECORD) { - String cname = nameTransformer.convertStreamName(message.getRecord().getStream()); - - Map obj = mapper.convertValue(message.getRecord().getData(), new TypeReference<>() { - }); - long current = ChronoUnit.MICROS.between(Instant.EPOCH, Instant.now()); - - // ensure a monotonic timestamp on records at microsecond precision. - while (current <= lastSentDocumentMicroSeconds) { - current = ChronoUnit.MICROS.between(Instant.EPOCH, Instant.now()); - } - lastSentDocumentMicroSeconds = current; - - // microsecond precision - // See https://rockset.com/docs/special-fields/#the-_event_time-field - obj.put("_event_time", current); - addRequestToBatch(obj, cname); - } else if (message.getType() == AirbyteMessage.Type.STATE) { - this.outputRecordCollector.accept(message); - } - } - - @Override - public void close() throws Exception { - // Nothing to do - LOGGER.info("Shutting down!"); - LOGGER.info("Sending final batch of records if any remain!"); - sendBatches(); - LOGGER.info("Final batch of records sent!"); - LOGGER.info("Shutting down executors"); - this.schedExec.shutdown(); - exec.shutdown(); - LOGGER.info("Executors shut down"); - } - - private void addRequestToBatch(Object document, String cname) { - synchronized (this.records) { - List collectionRecords = this.records.getOrDefault(cname, new ArrayList<>()); - collectionRecords.add(document); - this.records.put(cname, collectionRecords); - } - } - - private void sendBatches() { - List> requests; - synchronized (this.records) { - requests = this.records.entrySet().stream().filter(e -> e.getValue().size() > 0) - .map((e) -> { - AddDocumentsRequest adr = new AddDocumentsRequest(); - e.getValue().forEach(adr::addDataItem); - return Map.entry(e.getKey(), adr); - } - - ).collect(Collectors.toList()); - this.records.clear(); - } - List responses; - responses = requests.stream().map((e) -> - Exceptions.toRuntime(() -> new DocumentsApi(client).add(workspace, e.getKey(), e.getValue())) - ).collect(Collectors.toList()); - - - responses - .stream() - .flatMap(d -> d.getData().stream()) - .collect(Collectors.groupingBy(DocumentStatus::getStatus)) - .entrySet() - .stream() - .forEach((e) -> LOGGER.info("{} documents added with a status of {}", e.getValue().size(), e.getKey())); + private static final Logger LOGGER = 
LoggerFactory.getLogger(RocksetWriteApiConsumer.class); + private static final ObjectMapper mapper = new ObjectMapper(); + // IO bound tasks, use cached thread pool + private final ExecutorService exec = Executors.newFixedThreadPool(5); + + private final ScheduledExecutorService schedExec = Executors.newSingleThreadScheduledExecutor(); + + private final String apiKey; + private final String apiServer; + private final String workspace; + + private final ConfiguredAirbyteCatalog catalog; + private final Consumer outputRecordCollector; + + // records to be sent per collection + private final Map> records; + private long lastSentDocumentMicroSeconds = 0L; + + private final RocksetSQLNameTransformer nameTransformer = new RocksetSQLNameTransformer(); + + private ApiClient client; + + public RocksetWriteApiConsumer( + JsonNode config, + ConfiguredAirbyteCatalog catalog, + Consumer outputRecordCollector) { + this.apiKey = config.get(API_KEY_ID).asText(); + this.apiServer = config.get(API_SERVER_ID).asText(); + this.workspace = config.get(ROCKSET_WORKSPACE_ID).asText(); + this.records = new HashMap<>(); + + this.catalog = catalog; + this.outputRecordCollector = outputRecordCollector; + } + + @Override + public void start() throws Exception { + this.client = RocksetUtils.apiClient(apiKey, apiServer); + LOGGER.info("Creating workspace"); + RocksetUtils.createWorkspaceIfNotExists(client, workspace); + + CompletableFuture[] overwrittenStreams = catalog.getStreams() + .stream() + .filter(s -> s.getDestinationSyncMode() == DestinationSyncMode.OVERWRITE) + .map(s -> s.getStream().getName()) + .map(nameTransformer::convertStreamName) + .map(this::emptyCollection) + .collect(Collectors.toList()) + .toArray(CompletableFuture[]::new); + + CompletableFuture[] appendStreams = catalog.getStreams().stream() + .filter(s -> s.getDestinationSyncMode() == DestinationSyncMode.APPEND) + .map(s -> s.getStream().getName()) + .map(nameTransformer::convertStreamName) + .map(this::createCollectionIntoReadyState) + .collect(Collectors.toList()) + .toArray(CompletableFuture[]::new); + + CompletableFuture initStreams = CompletableFuture.allOf( + CompletableFuture.allOf(overwrittenStreams), + CompletableFuture.allOf(appendStreams)); + + // Creating and readying many collections at once can be slow + initStreams.get(30, TimeUnit.MINUTES); + + // Schedule sending of records at a fixed rate + schedExec.scheduleAtFixedRate(this::sendBatches, 0L, 5L, TimeUnit.SECONDS); + } + + @Override + public void accept(AirbyteMessage message) throws Exception { + if (message.getType() == AirbyteMessage.Type.RECORD) { + String cname = nameTransformer.convertStreamName(message.getRecord().getStream()); + + Map obj = mapper.convertValue(message.getRecord().getData(), new TypeReference<>() {}); + long current = ChronoUnit.MICROS.between(Instant.EPOCH, Instant.now()); + + // ensure a monotonic timestamp on records at microsecond precision. 
+ while (current <= lastSentDocumentMicroSeconds) { + current = ChronoUnit.MICROS.between(Instant.EPOCH, Instant.now()); + } + lastSentDocumentMicroSeconds = current; + + // microsecond precision + // See https://rockset.com/docs/special-fields/#the-_event_time-field + obj.put("_event_time", current); + addRequestToBatch(obj, cname); + } else if (message.getType() == AirbyteMessage.Type.STATE) { + this.outputRecordCollector.accept(message); } - - private CompletableFuture emptyCollection(String cname) { - return CompletableFuture.runAsync(() -> { - RocksetUtils.clearCollectionIfCollectionExists(client, workspace, cname); - RocksetUtils.createCollectionIfNotExists(client, workspace, cname); - RocksetUtils.waitUntilCollectionReady(client, workspace, cname); - }, exec); + } + + @Override + public void close() throws Exception { + // Nothing to do + LOGGER.info("Shutting down!"); + LOGGER.info("Sending final batch of records if any remain!"); + sendBatches(); + LOGGER.info("Final batch of records sent!"); + LOGGER.info("Shutting down executors"); + this.schedExec.shutdown(); + exec.shutdown(); + LOGGER.info("Executors shut down"); + } + + private void addRequestToBatch(Object document, String cname) { + synchronized (this.records) { + List collectionRecords = this.records.getOrDefault(cname, new ArrayList<>()); + collectionRecords.add(document); + this.records.put(cname, collectionRecords); } - - private CompletableFuture createCollectionIntoReadyState(String cname) { - return CompletableFuture.runAsync(() -> { - RocksetUtils.createCollectionIfNotExists(client, workspace, cname); - RocksetUtils.waitUntilCollectionReady(client, workspace, cname); - }, exec); + } + + private void sendBatches() { + List> requests; + synchronized (this.records) { + requests = this.records.entrySet().stream().filter(e -> e.getValue().size() > 0) + .map((e) -> { + AddDocumentsRequest adr = new AddDocumentsRequest(); + e.getValue().forEach(adr::addDataItem); + return Map.entry(e.getKey(), adr); + } + + ).collect(Collectors.toList()); + this.records.clear(); } - + List responses; + responses = requests.stream().map((e) -> Exceptions.toRuntime(() -> new DocumentsApi(client).add(workspace, e.getKey(), e.getValue()))) + .collect(Collectors.toList()); + + responses + .stream() + .flatMap(d -> d.getData().stream()) + .collect(Collectors.groupingBy(DocumentStatus::getStatus)) + .entrySet() + .stream() + .forEach((e) -> LOGGER.info("{} documents added with a status of {}", e.getValue().size(), e.getKey())); + } + + private CompletableFuture emptyCollection(String cname) { + return CompletableFuture.runAsync(() -> { + RocksetUtils.clearCollectionIfCollectionExists(client, workspace, cname); + RocksetUtils.createCollectionIfNotExists(client, workspace, cname); + RocksetUtils.waitUntilCollectionReady(client, workspace, cname); + }, exec); + } + + private CompletableFuture createCollectionIntoReadyState(String cname) { + return CompletableFuture.runAsync(() -> { + RocksetUtils.createCollectionIfNotExists(client, workspace, cname); + RocksetUtils.waitUntilCollectionReady(client, workspace, cname); + }, exec); + } } diff --git a/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java index c9e36f04ffc8..cfd96d02dffa 100644 --- 
a/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java @@ -17,9 +17,7 @@ import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.lang.Exceptions; -import io.airbyte.commons.resources.MoreResources; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; - import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; @@ -30,7 +28,6 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.stream.Collectors; - import org.junit.jupiter.api.AfterAll; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -38,138 +35,139 @@ public class RocksetDestinationAcceptanceTest extends DestinationAcceptanceTest { - private static final ObjectMapper mapper = new ObjectMapper(); - private static final Set collectionsToClear = Sets.newHashSet(); - private static final Set collectionsToDelete = Sets.newHashSet(); - private static final ExecutorService tearDownExec = Executors.newCachedThreadPool(); - private static final RocksetSQLNameTransformer nameTransformer = new RocksetSQLNameTransformer(); - - private static final Logger LOGGER = - LoggerFactory.getLogger(RocksetDestinationAcceptanceTest.class); - - @Override - protected String getImageName() { - return "airbyte/destination-rockset:dev"; - } - - @Override - protected JsonNode getConfig() throws IOException { - return Jsons.deserialize(IOs.readFile(Path.of("secrets/config.json"))); + private static final ObjectMapper mapper = new ObjectMapper(); + private static final Set collectionsToClear = Sets.newHashSet(); + private static final Set collectionsToDelete = Sets.newHashSet(); + private static final ExecutorService tearDownExec = Executors.newCachedThreadPool(); + private static final RocksetSQLNameTransformer nameTransformer = new RocksetSQLNameTransformer(); + + private static final Logger LOGGER = + LoggerFactory.getLogger(RocksetDestinationAcceptanceTest.class); + + @Override + protected String getImageName() { + return "airbyte/destination-rockset:dev"; + } + + @Override + protected JsonNode getConfig() throws IOException { + return Jsons.deserialize(IOs.readFile(Path.of("secrets/config.json"))); + } + + @Override + protected JsonNode getFailCheckConfig() throws Exception { + return Jsons.jsonNode( + ImmutableMap.builder() + .put("workspace", "commons") + .put("api_key", "nope nope nope") + .build()); + } + + @Override + protected List retrieveRecords( + TestDestinationEnv testEnv, + String stream, + String namespace, + JsonNode streamSchema) + throws Exception { + + final String ws = getConfig().get("workspace").asText(); + final ApiClient client = RocksetUtils.apiClientFromConfig(getConfig()); + final String streamName = nameTransformer.convertStreamName(stream); + LOGGER.info("Retrieving records for " + streamName); + + RocksetUtils.createWorkspaceIfNotExists(client, ws); + RocksetUtils.createCollectionIfNotExists(client, ws, streamName); + RocksetUtils.waitUntilCollectionReady(client, ws, streamName); + collectionsToClear.add(streamName); + collectionsToDelete.add(streamName); + + // ORDER BY _event_time because the test suite expects to retrieve messages in the order they + // were + // originally written + final String sqlText = 
String.format("SELECT * FROM %s.%s ORDER BY _event_time;", ws, streamName); + + final QueryRequest query = new QueryRequest().sql(new QueryRequestSql().query(sqlText)); + + final QueriesApi queryClient = new QueriesApi(RocksetUtils.apiClientFromConfig(getConfig())); + + LOGGER.info("About to wait for indexing on " + streamName); + try { + // As Rockset is not a transactional database, we have to wait a few seconds to be extra sure + // that we've given documents enough time to be fully indexed when retrieving records + Thread.sleep(20_000); + } catch (InterruptedException e) { + e.printStackTrace(); } - - @Override - protected JsonNode getFailCheckConfig() throws Exception { - return Jsons.jsonNode( - ImmutableMap.builder() - .put("workspace", "commons") - .put("api_key", "nope nope nope") - .build()); - } - - @Override - protected List retrieveRecords( - TestDestinationEnv testEnv, String stream, String namespace, JsonNode streamSchema) - throws Exception { - - - final String ws = getConfig().get("workspace").asText(); - final ApiClient client = RocksetUtils.apiClientFromConfig(getConfig()); - final String streamName = nameTransformer.convertStreamName(stream); - LOGGER.info("Retrieving records for " + streamName); - - RocksetUtils.createWorkspaceIfNotExists(client, ws); - RocksetUtils.createCollectionIfNotExists(client, ws, streamName); - RocksetUtils.waitUntilCollectionReady(client, ws, streamName); - collectionsToClear.add(streamName); - collectionsToDelete.add(streamName); - - // ORDER BY _event_time because the test suite expects to retrieve messages in the order they - // were - // originally written - final String sqlText = String.format("SELECT * FROM %s.%s ORDER BY _event_time;", ws, streamName); - - final QueryRequest query = new QueryRequest().sql(new QueryRequestSql().query(sqlText)); - - final QueriesApi queryClient = new QueriesApi(RocksetUtils.apiClientFromConfig(getConfig())); - - LOGGER.info("About to wait for indexing on " + streamName); - try { - // As Rockset is not a transactional database, we have to wait a few seconds to be extra sure - // that we've given documents enough time to be fully indexed when retrieving records - Thread.sleep(20_000); - } catch (InterruptedException e) { - e.printStackTrace(); - } - List results = new ArrayList<>(); - int previousResultSize; - // By heuristic once the document level stabilizes, the ingestion is probably done - do { - previousResultSize = results.size(); - Thread.sleep(10_000); - final Response response = queryClient.queryCall(query, null, null).execute(); - final JsonNode json = mapper.readTree(response.body().string()); - results = Lists.newArrayList(json.get("results").iterator()); - LOGGER.info("Waiting on stable doc counts, prev= " + previousResultSize + " currrent=" + results.size()); - } while (results.size() != previousResultSize); - - return results.stream() - .peek(RocksetDestinationAcceptanceTest::dropRocksetAddedFields) - .collect(Collectors.toList()); - } - - private static void dropRocksetAddedFields(JsonNode n) { - dropFields(n, "_id", "_event_time"); + List results = new ArrayList<>(); + int previousResultSize; + // By heuristic once the document level stabilizes, the ingestion is probably done + do { + previousResultSize = results.size(); + Thread.sleep(10_000); + final Response response = queryClient.queryCall(query, null, null).execute(); + final JsonNode json = mapper.readTree(response.body().string()); + results = Lists.newArrayList(json.get("results").iterator()); + LOGGER.info("Waiting on stable doc 
counts, prev= " + previousResultSize + " currrent=" + results.size()); + } while (results.size() != previousResultSize); + + return results.stream() + .peek(RocksetDestinationAcceptanceTest::dropRocksetAddedFields) + .collect(Collectors.toList()); + } + + private static void dropRocksetAddedFields(JsonNode n) { + dropFields(n, "_id", "_event_time"); + } + + private static void dropFields(JsonNode node, String... fields) { + Arrays.stream(fields).forEach(((ObjectNode) node)::remove); + } + + @Override + protected void setup(TestDestinationEnv testEnv) { + // Nothing to do + } + + @Override + protected void tearDown(TestDestinationEnv testEnv) { + try { + final ApiClient client = RocksetUtils.apiClientFromConfig(getConfig()); + String workspace = getConfig().get("workspace").asText(); + collectionsToClear.stream() + .map( + cn -> CompletableFuture.runAsync(() -> { + RocksetUtils.clearCollectionIfCollectionExists(client, workspace, cn); + }, tearDownExec)) + // collect to avoid laziness of stream + .collect(Collectors.toList()) + .forEach(CompletableFuture::join); + collectionsToClear.clear(); + } catch (IOException e) { + e.printStackTrace(); } + } + + @AfterAll + public static void exitSuite() throws Exception { + LOGGER.info("Deleting all collections used during testing "); + final JsonNode config = Jsons.deserialize(IOs.readFile(Path.of("secrets/config.json"))); + final ApiClient client = RocksetUtils.apiClientFromConfig(config); + final String workspace = config.get("workspace").asText(); + collectionsToDelete.stream().map(cn -> deleteCollection(client, workspace, cn)).collect(Collectors.toList()).forEach(CompletableFuture::join); + tearDownExec.shutdown(); + + } + + private static CompletableFuture deleteCollection(ApiClient client, String workspace, String cn) { + return CompletableFuture.runAsync( + () -> Exceptions.toRuntime( + () -> { + RocksetUtils.deleteCollectionIfExists(client, workspace, cn); + RocksetUtils.waitUntilCollectionDeleted(client, workspace, cn); + Thread.sleep(2500); // Let services pick up deletion in case of re-creation + }), + tearDownExec); + } - private static void dropFields(JsonNode node, String... 
fields) { - Arrays.stream(fields).forEach(((ObjectNode) node)::remove); - } - - @Override - protected void setup(TestDestinationEnv testEnv) { - // Nothing to do - } - - @Override - protected void tearDown(TestDestinationEnv testEnv) { - try { - final ApiClient client = RocksetUtils.apiClientFromConfig(getConfig()); - String workspace = getConfig().get("workspace").asText(); - collectionsToClear.stream() - .map( - cn -> - CompletableFuture.runAsync(() -> { - RocksetUtils.clearCollectionIfCollectionExists(client, workspace, cn); - }, tearDownExec)) - // collect to avoid laziness of stream - .collect(Collectors.toList()) - .forEach(CompletableFuture::join); - collectionsToClear.clear(); - } catch (IOException e) { - e.printStackTrace(); - } - } - - @AfterAll - public static void exitSuite() throws Exception { - LOGGER.info("Deleting all collections used during testing "); - final JsonNode config = Jsons.deserialize(IOs.readFile(Path.of("secrets/config.json"))); - final ApiClient client = RocksetUtils.apiClientFromConfig(config); - final String workspace = config.get("workspace").asText(); - collectionsToDelete.stream().map(cn -> deleteCollection(client, workspace, cn)).collect(Collectors.toList()).forEach(CompletableFuture::join); - tearDownExec.shutdown(); - - } - - private static CompletableFuture deleteCollection(ApiClient client, String workspace, String cn) { - return CompletableFuture.runAsync( - () -> - Exceptions.toRuntime( - () -> { - RocksetUtils.deleteCollectionIfExists(client, workspace, cn); - RocksetUtils.waitUntilCollectionDeleted(client, workspace, cn); - Thread.sleep(2500); // Let services pick up deletion in case of re-creation - }), - tearDownExec); - } } diff --git a/airbyte-integrations/connectors/destination-s3/Dockerfile b/airbyte-integrations/connectors/destination-s3/Dockerfile index e68bba6018a1..16773fda6dde 100644 --- a/airbyte-integrations/connectors/destination-s3/Dockerfile +++ b/airbyte-integrations/connectors/destination-s3/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-s3 -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar +ADD build/distributions/${APPLICATION}*.tar /airbyte -RUN tar xf ${APPLICATION}.tar --strip-components=1 - -LABEL io.airbyte.version=0.1.16 +LABEL io.airbyte.version=0.2.0 LABEL io.airbyte.name=airbyte/destination-s3 diff --git a/airbyte-integrations/connectors/destination-s3/build.gradle b/airbyte-integrations/connectors/destination-s3/build.gradle index c8a3fd05c5cf..54d6e55f432f 100644 --- a/airbyte-integrations/connectors/destination-s3/build.gradle +++ b/airbyte-integrations/connectors/destination-s3/build.gradle @@ -32,6 +32,7 @@ dependencies { } testImplementation 'org.apache.commons:commons-lang3:3.11' + testImplementation "org.mockito:mockito-inline:4.1.0" integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-s3') diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java index 12fba68b1c95..3aea3ceceed3 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java +++ 
b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java @@ -12,6 +12,7 @@ import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.fasterxml.jackson.databind.JsonNode; +import java.util.Objects; /** * An S3 configuration. Typical usage sets at most one of {@code bucketPath} (necessary for more @@ -38,8 +39,7 @@ public class S3DestinationConfig { * The part size should not matter in any use case that depends on this constructor. So the default * 10 MB is used. */ - public S3DestinationConfig( - final String endpoint, + public S3DestinationConfig(final String endpoint, final String bucketName, final String bucketPath, final String bucketRegion, @@ -49,8 +49,7 @@ public S3DestinationConfig( this(endpoint, bucketName, bucketPath, bucketRegion, accessKeyId, secretAccessKey, DEFAULT_PART_SIZE_MB, formatConfig); } - public S3DestinationConfig( - final String endpoint, + public S3DestinationConfig(final String endpoint, final String bucketName, final String bucketPath, final String bucketRegion, @@ -148,4 +147,38 @@ public AmazonS3 getS3Client() { .build(); } + public S3DestinationConfig cloneWithFormatConfig(final S3FormatConfig formatConfig) { + return new S3DestinationConfig( + this.endpoint, + this.bucketName, + this.bucketPath, + this.bucketRegion, + this.accessKeyId, + this.secretAccessKey, + this.partSize, + formatConfig); + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final S3DestinationConfig that = (S3DestinationConfig) o; + return Objects.equals(endpoint, that.endpoint) && Objects.equals(bucketName, that.bucketName) && Objects.equals( + bucketPath, that.bucketPath) && Objects.equals(bucketRegion, that.bucketRegion) + && Objects.equals(accessKeyId, + that.accessKeyId) + && Objects.equals(secretAccessKey, that.secretAccessKey) && Objects.equals(partSize, that.partSize) + && Objects.equals(formatConfig, that.formatConfig); + } + + @Override + public int hashCode() { + return Objects.hash(endpoint, bucketName, bucketPath, bucketRegion, accessKeyId, secretAccessKey, partSize, formatConfig); + } + } diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/JsonToAvroSchemaConverter.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/JsonToAvroSchemaConverter.java index e723beacc1c8..61ed35c216d2 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/JsonToAvroSchemaConverter.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/JsonToAvroSchemaConverter.java @@ -176,7 +176,7 @@ Schema getSingleFieldType(final String fieldName, final JsonSchemaType fieldType case NUMBER, INTEGER, BOOLEAN -> fieldSchema = Schema.create(fieldType.getAvroType()); case STRING -> { if (fieldDefinition.has("format")) { - String format = fieldDefinition.get("format").asText(); + final String format = fieldDefinition.get("format").asText(); fieldSchema = switch (format) { case "date-time" -> LogicalTypes.timestampMicros().addToSchema(Schema.create(Schema.Type.LONG)); case "date" -> LogicalTypes.date().addToSchema(Schema.create(Schema.Type.INT)); diff --git 
a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroWriter.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroWriter.java index 7448a48b7908..20f15d79c5c4 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroWriter.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/S3AvroWriter.java @@ -34,6 +34,7 @@ public class S3AvroWriter extends BaseS3Writer implements S3Writer { private final StreamTransferManager uploadManager; private final MultiPartOutputStream outputStream; private final DataFileWriter dataFileWriter; + private final String objectKey; public S3AvroWriter(final S3DestinationConfig config, final AmazonS3 s3Client, @@ -45,10 +46,9 @@ public S3AvroWriter(final S3DestinationConfig config, super(config, s3Client, configuredStream); final String outputFilename = BaseS3Writer.getOutputFilename(uploadTimestamp, S3Format.AVRO); - final String objectKey = String.join("/", outputPrefix, outputFilename); + objectKey = String.join("/", outputPrefix, outputFilename); - LOGGER.info("Full S3 path for stream '{}': s3://{}/{}", stream.getName(), config.getBucketName(), - objectKey); + LOGGER.info("Full S3 path for stream '{}': s3://{}/{}", stream.getName(), config.getBucketName(), objectKey); this.avroRecordFactory = new AvroRecordFactory(schema, converter); this.uploadManager = S3StreamTransferManagerHelper.getDefault( @@ -83,4 +83,9 @@ protected void closeWhenFail() throws IOException { uploadManager.abort(); } + @Override + public String getOutputPath() { + return objectKey; + } + } diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvFormatConfig.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvFormatConfig.java index 8a5b3d99d612..eaa3d1091723 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvFormatConfig.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvFormatConfig.java @@ -10,6 +10,7 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.integrations.destination.s3.S3Format; import io.airbyte.integrations.destination.s3.S3FormatConfig; +import java.util.Objects; public class S3CsvFormatConfig implements S3FormatConfig { @@ -45,8 +46,14 @@ public String getValue() { private final Long partSize; public S3CsvFormatConfig(final JsonNode formatConfig) { - this.flattening = Flattening.fromValue(formatConfig.get("flattening").asText()); - this.partSize = formatConfig.get(PART_SIZE_MB_ARG_NAME) != null ? formatConfig.get(PART_SIZE_MB_ARG_NAME).asLong() : null; + this( + Flattening.fromValue(formatConfig.get("flattening").asText()), + formatConfig.get(PART_SIZE_MB_ARG_NAME) != null ? 
formatConfig.get(PART_SIZE_MB_ARG_NAME).asLong() : null); + } + + public S3CsvFormatConfig(final Flattening flattening, final Long partSize) { + this.flattening = flattening; + this.partSize = partSize; } @Override @@ -58,6 +65,7 @@ public Flattening getFlattening() { return flattening; } + @Override public Long getPartSize() { return partSize; } @@ -70,4 +78,21 @@ public String toString() { '}'; } + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final S3CsvFormatConfig that = (S3CsvFormatConfig) o; + return flattening == that.flattening && Objects.equals(partSize, that.partSize); + } + + @Override + public int hashCode() { + return Objects.hash(flattening, partSize); + } + } diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriter.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriter.java index 1a06d38390c3..500ffceff625 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriter.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriter.java @@ -33,31 +33,103 @@ public class S3CsvWriter extends BaseS3Writer implements S3Writer { private final StreamTransferManager uploadManager; private final MultiPartOutputStream outputStream; private final CSVPrinter csvPrinter; + private final String objectKey; - public S3CsvWriter(final S3DestinationConfig config, - final AmazonS3 s3Client, - final ConfiguredAirbyteStream configuredStream, - final Timestamp uploadTimestamp) + private S3CsvWriter(final S3DestinationConfig config, + final AmazonS3 s3Client, + final ConfiguredAirbyteStream configuredStream, + final Timestamp uploadTimestamp, + final int uploadThreads, + final int queueCapacity, + final boolean writeHeader, + CSVFormat csvSettings, + final CsvSheetGenerator csvSheetGenerator) throws IOException { super(config, s3Client, configuredStream); - final S3CsvFormatConfig formatConfig = (S3CsvFormatConfig) config.getFormatConfig(); - this.csvSheetGenerator = CsvSheetGenerator.Factory.create(configuredStream.getStream().getJsonSchema(), - formatConfig); + this.csvSheetGenerator = csvSheetGenerator; - final String outputFilename = BaseS3Writer.getOutputFilename(uploadTimestamp, S3Format.CSV); - final String objectKey = String.join("/", outputPrefix, outputFilename); + final String fileSuffix = "_" + UUID.randomUUID(); + final String outputFilename = BaseS3Writer.getOutputFilename(uploadTimestamp, fileSuffix, S3Format.CSV); + this.objectKey = String.join("/", outputPrefix, outputFilename); LOGGER.info("Full S3 path for stream '{}': s3://{}/{}", stream.getName(), config.getBucketName(), objectKey); - this.uploadManager = S3StreamTransferManagerHelper.getDefault( - config.getBucketName(), objectKey, s3Client, config.getFormatConfig().getPartSize()); + this.uploadManager = S3StreamTransferManagerHelper.getDefault(config.getBucketName(), objectKey, s3Client, config.getFormatConfig().getPartSize()) + .numUploadThreads(uploadThreads) + .queueCapacity(queueCapacity); // We only need one output stream as we only have one input stream. This is reasonably performant. 
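The constructor is now private; callers go through the `Builder` defined just below, and `ProductionWriterFactory` switches accordingly to `new S3CsvWriter.Builder(...).build()`. A hedged usage sketch of the new API for the staging use case (the thread and queue values are illustrative, not defaults taken from the source):

```java
import com.amazonaws.services.s3.AmazonS3;
import io.airbyte.integrations.destination.s3.S3DestinationConfig;
import io.airbyte.protocol.models.ConfiguredAirbyteStream;
import java.io.IOException;
import java.sql.Timestamp;
import org.apache.commons.csv.CSVFormat;

// Sketch: configuring S3CsvWriter.Builder the way a staging copier might.
// Builder defaults reproduce the old public constructor's behavior; the
// extra knobs (header, csvSettings, csvSheetGenerator) exist for the
// S3StreamCopier use case.
class S3CsvWriterUsageSketch {

  static S3CsvWriter stagingWriter(final S3DestinationConfig config,
                                   final AmazonS3 s3Client,
                                   final ConfiguredAirbyteStream stream,
                                   final Timestamp uploadTime) throws IOException {
    return new S3CsvWriter.Builder(config, s3Client, stream, uploadTime)
        .uploadThreads(4)               // illustrative value
        .queueCapacity(10)              // illustrative value
        .withHeader(false)              // staging COPY loads do not want a header row
        .csvSettings(CSVFormat.DEFAULT) // no forced quoting
        .csvSheetGenerator(new StagingDatabaseCsvSheetGenerator())
        .build();
  }

}
```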
this.outputStream = uploadManager.getMultiPartOutputStreams().get(0); - this.csvPrinter = new CSVPrinter(new PrintWriter(outputStream, true, StandardCharsets.UTF_8), - CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL) - .withHeader(csvSheetGenerator.getHeaderRow().toArray(new String[0]))); + if (writeHeader) { + csvSettings = csvSettings.withHeader(csvSheetGenerator.getHeaderRow().toArray(new String[0])); + } + this.csvPrinter = new CSVPrinter(new PrintWriter(outputStream, true, StandardCharsets.UTF_8), csvSettings); + } + + public static class Builder { + + private final S3DestinationConfig config; + private final AmazonS3 s3Client; + private final ConfiguredAirbyteStream configuredStream; + private final Timestamp uploadTimestamp; + private int uploadThreads = S3StreamTransferManagerHelper.DEFAULT_UPLOAD_THREADS; + private int queueCapacity = S3StreamTransferManagerHelper.DEFAULT_QUEUE_CAPACITY; + private boolean withHeader = true; + private CSVFormat csvSettings = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL); + private CsvSheetGenerator csvSheetGenerator; + + public Builder(final S3DestinationConfig config, + final AmazonS3 s3Client, + final ConfiguredAirbyteStream configuredStream, + final Timestamp uploadTimestamp) { + this.config = config; + this.s3Client = s3Client; + this.configuredStream = configuredStream; + this.uploadTimestamp = uploadTimestamp; + } + + public Builder uploadThreads(final int uploadThreads) { + this.uploadThreads = uploadThreads; + return this; + } + + public Builder queueCapacity(final int queueCapacity) { + this.queueCapacity = queueCapacity; + return this; + } + + public Builder withHeader(final boolean withHeader) { + this.withHeader = withHeader; + return this; + } + + public Builder csvSettings(final CSVFormat csvSettings) { + this.csvSettings = csvSettings; + return this; + } + + public Builder csvSheetGenerator(final CsvSheetGenerator csvSheetGenerator) { + this.csvSheetGenerator = csvSheetGenerator; + return this; + } + + public S3CsvWriter build() throws IOException { + if (csvSheetGenerator == null) { + final S3CsvFormatConfig formatConfig = (S3CsvFormatConfig) config.getFormatConfig(); + csvSheetGenerator = CsvSheetGenerator.Factory.create(configuredStream.getStream().getJsonSchema(), formatConfig); + } + return new S3CsvWriter(config, + s3Client, + configuredStream, + uploadTimestamp, + uploadThreads, + queueCapacity, + withHeader, + csvSettings, + csvSheetGenerator); + } + } @Override @@ -79,4 +151,9 @@ protected void closeWhenFail() throws IOException { uploadManager.abort(); } + @Override + public String getOutputPath() { + return objectKey; + } + } diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/StagingDatabaseCsvSheetGenerator.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/StagingDatabaseCsvSheetGenerator.java new file mode 100644 index 000000000000..4310a074147b --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/StagingDatabaseCsvSheetGenerator.java @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.s3.csv; + +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.JavaBaseConstants; +import io.airbyte.protocol.models.AirbyteRecordMessage; +import java.sql.Timestamp; +import java.time.Instant; +import java.util.List; +import java.util.UUID; + +/** + * A CsvSheetGenerator that produces data in the format expected by JdbcSqlOperations. See + * JdbcSqlOperations#createTableQuery. + *
<p>
+ * This intentionally does not extend {@link BaseSheetGenerator}, because it needs the columns in a + * different order (ABID, JSON, timestamp) vs (ABID, timestamp, JSON) + */ +public class StagingDatabaseCsvSheetGenerator implements CsvSheetGenerator { + + /** + * This method is implemented for clarity, but not actually used. S3StreamCopier disables headers on + * S3CsvWriter. + */ + @Override + public List getHeaderRow() { + return List.of( + JavaBaseConstants.COLUMN_NAME_AB_ID, + JavaBaseConstants.COLUMN_NAME_DATA, + JavaBaseConstants.COLUMN_NAME_EMITTED_AT); + } + + @Override + public List getDataRow(final UUID id, final AirbyteRecordMessage recordMessage) { + return List.of( + id, + Jsons.serialize(recordMessage.getData()), + Timestamp.from(Instant.ofEpochMilli(recordMessage.getEmittedAt()))); + } + +} diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlWriter.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlWriter.java index b601bbfa8e6f..af90cbecccf3 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlWriter.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/S3JsonlWriter.java @@ -37,6 +37,7 @@ public class S3JsonlWriter extends BaseS3Writer implements S3Writer { private final StreamTransferManager uploadManager; private final MultiPartOutputStream outputStream; private final PrintWriter printWriter; + private final String objectKey; public S3JsonlWriter(final S3DestinationConfig config, final AmazonS3 s3Client, @@ -45,10 +46,9 @@ public S3JsonlWriter(final S3DestinationConfig config, super(config, s3Client, configuredStream); final String outputFilename = BaseS3Writer.getOutputFilename(uploadTimestamp, S3Format.JSONL); - final String objectKey = String.join("/", outputPrefix, outputFilename); + objectKey = String.join("/", outputPrefix, outputFilename); - LOGGER.info("Full S3 path for stream '{}': s3://{}/{}", stream.getName(), config.getBucketName(), - objectKey); + LOGGER.info("Full S3 path for stream '{}': s3://{}/{}", stream.getName(), config.getBucketName(), objectKey); this.uploadManager = S3StreamTransferManagerHelper.getDefault( config.getBucketName(), objectKey, s3Client, config.getFormatConfig().getPartSize()); @@ -80,4 +80,9 @@ protected void closeWhenFail() { uploadManager.abort(); } + @Override + public String getOutputPath() { + return objectKey; + } + } diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/parquet/S3ParquetWriter.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/parquet/S3ParquetWriter.java index 65afc0805f14..e6b997b895ea 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/parquet/S3ParquetWriter.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/parquet/S3ParquetWriter.java @@ -38,6 +38,7 @@ public class S3ParquetWriter extends BaseS3Writer implements S3Writer { private final AvroRecordFactory avroRecordFactory; private final Schema schema; private final String outputFilename; + private final String objectKey; public S3ParquetWriter(final S3DestinationConfig config, final AmazonS3 s3Client, @@ -49,10 +50,9 @@ public 
S3ParquetWriter(final S3DestinationConfig config, super(config, s3Client, configuredStream); this.outputFilename = BaseS3Writer.getOutputFilename(uploadTimestamp, S3Format.PARQUET); - final String objectKey = String.join("/", outputPrefix, outputFilename); + objectKey = String.join("/", outputPrefix, outputFilename); - LOGGER.info("Full S3 path for stream '{}': s3://{}/{}", stream.getName(), config.getBucketName(), - objectKey); + LOGGER.info("Full S3 path for stream '{}': s3://{}/{}", stream.getName(), config.getBucketName(), objectKey); final URI uri = new URI( String.format("s3a://%s/%s/%s", config.getBucketName(), outputPrefix, outputFilename)); @@ -118,4 +118,9 @@ protected void closeWhenFail() throws IOException { parquetWriter.close(); } + @Override + public String getOutputPath() { + return objectKey; + } + } diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/BaseS3Writer.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/BaseS3Writer.java index 8ef3bf3aa3c8..ad0badae0e60 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/BaseS3Writer.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/BaseS3Writer.java @@ -37,6 +37,7 @@ public abstract class BaseS3Writer implements S3Writer { private static final Logger LOGGER = LoggerFactory.getLogger(BaseS3Writer.class); + public static final String DEFAULT_SUFFIX = "_0"; protected final S3DestinationConfig config; protected final AmazonS3 s3Client; @@ -123,14 +124,29 @@ protected void closeWhenFail() throws IOException { // Do nothing by default } - // Filename: __0. + /** + * @return A string in the format "{upload-date}_{upload-millis}_0.{format-extension}". For example, + * "2021_12_09_1639077474000_0.csv" + */ public static String getOutputFilename(final Timestamp timestamp, final S3Format format) { + return getOutputFilename(timestamp, DEFAULT_SUFFIX, format); + } + + /** + * @param customSuffix A string to append to the filename. Commonly used to distinguish multiple + * part files within a single upload. You probably want to use strings with a leading + * underscore (i.e. prefer "_0" to "0"). + * @return A string in the format "{upload-date}_{upload-millis}_{suffix}.{format-extension}". 
For + * example, "2021_12_09_1639077474000_customSuffix.csv" + */ + public static String getOutputFilename(final Timestamp timestamp, final String customSuffix, final S3Format format) { final DateFormat formatter = new SimpleDateFormat(S3DestinationConstants.YYYY_MM_DD_FORMAT_STRING); formatter.setTimeZone(TimeZone.getTimeZone("UTC")); return String.format( - "%s_%d_0.%s", + "%s_%d%s.%s", formatter.format(timestamp), timestamp.getTime(), + customSuffix, format.getFileExtension()); } diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/ProductionWriterFactory.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/ProductionWriterFactory.java index 39041093271c..d3d883bd4d04 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/ProductionWriterFactory.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/ProductionWriterFactory.java @@ -49,7 +49,7 @@ public S3Writer create(final S3DestinationConfig config, } if (format == S3Format.CSV) { - return new S3CsvWriter(config, s3Client, configuredStream, uploadTimestamp); + return new S3CsvWriter.Builder(config, s3Client, configuredStream, uploadTimestamp).build(); } if (format == S3Format.JSONL) { diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/S3Writer.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/S3Writer.java index 46177a1f85e6..f9f5123cbe81 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/S3Writer.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/writer/S3Writer.java @@ -29,4 +29,11 @@ public interface S3Writer { */ void close(boolean hasFailed) throws IOException; + /** + * @return The path within the bucket that this writer will create. For example, if it is writing to + * "s3://yourBucket/some/path/to/file.csv", this method would return + * "some/path/to/file.csv". + */ + String getOutputPath(); + } diff --git a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriterTest.java b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriterTest.java new file mode 100644 index 000000000000..5a6a65d8f992 --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/csv/S3CsvWriterTest.java @@ -0,0 +1,326 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.s3.csv; + +import static java.util.Collections.singletonList; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.mockConstruction; +import static org.mockito.Mockito.verify; + +import alex.mojaki.s3upload.MultiPartOutputStream; +import alex.mojaki.s3upload.StreamTransferManager; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3Client; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.airbyte.integrations.destination.s3.S3DestinationConfig; +import io.airbyte.integrations.destination.s3.csv.S3CsvFormatConfig.Flattening; +import io.airbyte.integrations.destination.s3.csv.S3CsvWriter.Builder; +import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.DestinationSyncMode; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.sql.Timestamp; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; +import org.apache.commons.csv.CSVFormat; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.MockedConstruction; + +class S3CsvWriterTest { + + public static final ConfiguredAirbyteStream CONFIGURED_STREAM = new ConfiguredAirbyteStream() + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(new AirbyteStream() + .withName("fake-stream") + .withNamespace("fake-namespace")); + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + private static final int PART_SIZE = 7; + public static final S3DestinationConfig CONFIG = new S3DestinationConfig( + "fake-endpoint", + "fake-bucket", + "fake-bucketPath", + "fake-region", + "fake-access-key-id", + "fake-secret-access-key", + // The part size is configured in the format config. This field is only used by S3StreamCopier. + null, + new S3CsvFormatConfig(Flattening.NO, (long) PART_SIZE)); + + // equivalent to Thu, 09 Dec 2021 19:17:54 GMT + private static final Timestamp UPLOAD_TIME = Timestamp.from(Instant.ofEpochMilli(1639077474000L)); + private static final int UPLOAD_THREADS = 8; + private static final int QUEUE_CAPACITY = 9; + + // The full path would be something like + // "fake-bucketPath/fake_namespace/fake_stream/2021_12_09_1639077474000_e549e712-b89c-4272-9496-9690ba7f973e.csv" + // The namespace and stream have their hyphens replaced by underscores. Not super clear that that's + // actually required. + // 2021_12_09_1639077474000 is generated from the timestamp. It's followed by a random UUID, in case + // we need to create multiple files. 
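The expected prefix in the constants that follow can be reproduced directly from the upload timestamp. A small worked sketch of the naming scheme in BaseS3Writer.getOutputFilename (the "yyyy_MM_dd" pattern is an assumption standing in for S3DestinationConstants.YYYY_MM_DD_FORMAT_STRING):

```java
import java.sql.Timestamp;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.util.TimeZone;
import java.util.UUID;

// Sketch: reproducing the "{upload-date}_{upload-millis}{suffix}.{extension}"
// object names built by BaseS3Writer.getOutputFilename, using the same
// UTC-pinned formatter. Prints e.g. "2021_12_09_1639077474000_<uuid>.csv".
class OutputFilenameSketch {

  public static void main(final String[] args) {
    final Timestamp uploadTime = Timestamp.from(Instant.ofEpochMilli(1639077474000L));
    final DateFormat formatter = new SimpleDateFormat("yyyy_MM_dd");
    formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
    System.out.printf("%s_%d_%s.%s%n",
        formatter.format(uploadTime), uploadTime.getTime(), UUID.randomUUID(), "csv");
  }

}
```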
+ private static final String EXPECTED_OBJECT_BEGINNING = "fake-bucketPath/fake_namespace/fake_stream/2021_12_09_1639077474000_"; + private static final String EXPECTED_OBJECT_ENDING = ".csv"; + + private AmazonS3 s3Client; + + private MockedConstruction streamTransferManagerMockedConstruction; + private List streamTransferManagerConstructorArguments; + private List outputStreams; + + private record StreamTransferManagerArguments(String bucket, String object) { + + } + + @BeforeEach + public void setup() { + streamTransferManagerConstructorArguments = new ArrayList<>(); + outputStreams = new ArrayList<>(); + // This is basically RETURNS_SELF, except with getMultiPartOutputStreams configured correctly. + // Other non-void methods (e.g. toString()) will return null. + streamTransferManagerMockedConstruction = mockConstruction( + StreamTransferManager.class, + (mock, context) -> { + // Mockito doesn't seem to provide an easy way to actually retrieve these arguments later on, so + // manually store them on construction. + // _PowerMockito_ does, but I didn't want to set up that additional dependency. + final List arguments = context.arguments(); + streamTransferManagerConstructorArguments.add(new StreamTransferManagerArguments((String) arguments.get(0), (String) arguments.get(1))); + + doReturn(mock).when(mock).numUploadThreads(anyInt()); + doReturn(mock).when(mock).numStreams(anyInt()); + doReturn(mock).when(mock).queueCapacity(anyInt()); + doReturn(mock).when(mock).partSize(anyLong()); + + // We can't write a fake MultiPartOutputStream, because it doesn't have a public constructor. + // So instead, we'll build a mock that captures its data into a ByteArrayOutputStream. + final MultiPartOutputStream stream = mock(MultiPartOutputStream.class); + doReturn(singletonList(stream)).when(mock).getMultiPartOutputStreams(); + final ByteArrayOutputStream capturer = new ByteArrayOutputStream(); + outputStreams.add(capturer); + doAnswer(invocation -> { + capturer.write((int) invocation.getArgument(0)); + return null; + }).when(stream).write(anyInt()); + doAnswer(invocation -> { + capturer.write(invocation.getArgument(0)); + return null; + }).when(stream).write(any(byte[].class)); + doAnswer(invocation -> { + capturer.write(invocation.getArgument(0), invocation.getArgument(1), invocation.getArgument(2)); + return null; + }).when(stream).write(any(byte[].class), anyInt(), anyInt()); + }); + + s3Client = mock(AmazonS3Client.class); + } + + private Builder writer() { + return new Builder( + CONFIG, + s3Client, + CONFIGURED_STREAM, + UPLOAD_TIME).uploadThreads(UPLOAD_THREADS) + .queueCapacity(QUEUE_CAPACITY); + } + + @AfterEach + public void teardown() { + streamTransferManagerMockedConstruction.close(); + } + + @Test + public void generatesCorrectObjectKey_when_created() throws IOException { + final S3CsvWriter writer = writer().build(); + + final String objectKey = writer.getOutputPath(); + + checkObjectName(objectKey); + } + + @Test + public void createsExactlyOneUpload() throws IOException { + writer().build(); + + assertEquals(1, streamTransferManagerMockedConstruction.constructed().size()); + + final StreamTransferManager manager = streamTransferManagerMockedConstruction.constructed().get(0); + final StreamTransferManagerArguments args = streamTransferManagerConstructorArguments.get(0); + verify(manager).partSize(PART_SIZE); + verify(manager).numUploadThreads(UPLOAD_THREADS); + verify(manager).queueCapacity(QUEUE_CAPACITY); + assertEquals("fake-bucket", args.bucket); + checkObjectName(args.object); + } 
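As the comment in `setup()` notes, Mockito only exposes a construction's arguments inside the initializer callback, which is why the test copies them out manually. A minimal standalone sketch of that capture pattern (`FakeUploader` is a hypothetical stand-in for `StreamTransferManager`):

```java
import static org.mockito.Mockito.mockConstruction;

import java.util.ArrayList;
import java.util.List;
import org.mockito.MockedConstruction;

// Sketch: capturing constructor arguments with mockConstruction. The
// MockedConstruction.Context is only visible inside the initializer, so the
// arguments are stashed in a list for later assertions.
class ConstructionCaptureSketch {

  // Hypothetical class standing in for StreamTransferManager.
  static class FakeUploader {

    FakeUploader(final String bucket, final String object) {}

  }

  public static void main(final String[] args) {
    final List<List<?>> capturedArgs = new ArrayList<>();
    try (MockedConstruction<FakeUploader> ignored = mockConstruction(
        FakeUploader.class,
        (mock, context) -> capturedArgs.add(context.arguments()))) {
      new FakeUploader("my-bucket", "my/object.csv");
    }
    // capturedArgs.get(0) now holds ["my-bucket", "my/object.csv"].
    System.out.println(capturedArgs.get(0));
  }

}
```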
+ + @Test + public void closesS3Upload_when_stagingUploaderClosedSuccessfully() throws Exception { + final S3CsvWriter writer = writer().build(); + + writer.close(false); + + final List managers = streamTransferManagerMockedConstruction.constructed(); + final StreamTransferManager manager = managers.get(0); + verify(manager).complete(); + } + + @Test + public void closesS3Upload_when_stagingUploaderClosedFailingly() throws Exception { + final S3CsvWriter writer = writer().build(); + + writer.close(true); + + final List managers = streamTransferManagerMockedConstruction.constructed(); + final StreamTransferManager manager = managers.get(0); + verify(manager).abort(); + } + + @Test + public void writesContentsCorrectly_when_headerEnabled() throws IOException { + final S3CsvWriter writer = writer().build(); + + writer.write( + UUID.fromString("f6767f7d-ce1e-45cc-92db-2ad3dfdd088e"), + new AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"foo\": 73}")) + .withEmittedAt(1234L)); + writer.write( + UUID.fromString("2b95a13f-d54f-4370-a712-1c7bf2716190"), + new AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"bar\": 84}")) + .withEmittedAt(2345L)); + writer.close(false); + + // carriage returns are required b/c RFC4180 requires it :( + assertEquals( + """ + "_airbyte_ab_id","_airbyte_emitted_at","_airbyte_data"\r + "f6767f7d-ce1e-45cc-92db-2ad3dfdd088e","1234","{""foo"":73}"\r + "2b95a13f-d54f-4370-a712-1c7bf2716190","2345","{""bar"":84}"\r + """, + outputStreams.get(0).toString(StandardCharsets.UTF_8)); + } + + @Test + public void writesContentsCorrectly_when_headerDisabled() throws IOException { + final S3CsvWriter writer = writer().withHeader(false).build(); + + writer.write( + UUID.fromString("f6767f7d-ce1e-45cc-92db-2ad3dfdd088e"), + new AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"foo\": 73}")) + .withEmittedAt(1234L)); + writer.write( + UUID.fromString("2b95a13f-d54f-4370-a712-1c7bf2716190"), + new AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"bar\": 84}")) + .withEmittedAt(2345L)); + writer.close(false); + + // carriage returns are required b/c RFC4180 requires it :( + assertEquals( + """ + "f6767f7d-ce1e-45cc-92db-2ad3dfdd088e","1234","{""foo"":73}"\r + "2b95a13f-d54f-4370-a712-1c7bf2716190","2345","{""bar"":84}"\r + """, + outputStreams.get(0).toString(StandardCharsets.UTF_8)); + } + + /** + * This test verifies that the S3StreamCopier usecase works. Specifically, the withHeader, + * csvSettings, and csvSheetGenerator options were all added solely to support S3StreamCopier; we + * want to verify that it outputs the exact same data as the previous implementation. + */ + @Test + public void writesContentsCorrectly_when_stagingDatabaseConfig() throws IOException { + final S3CsvWriter writer = new Builder( + new S3DestinationConfig( + "fake-endpoint", + "fake-bucket", + "fake-bucketPath", + "fake-region", + "fake-access-key-id", + "fake-secret-access-key", + // The part size is configured in the format config. This field is only used by S3StreamCopier. 
+ null, + new S3CsvFormatConfig(null, (long) PART_SIZE)), + s3Client, + CONFIGURED_STREAM, + UPLOAD_TIME).uploadThreads(UPLOAD_THREADS) + .queueCapacity(QUEUE_CAPACITY) + .withHeader(false) + .csvSettings(CSVFormat.DEFAULT) + .csvSheetGenerator(new StagingDatabaseCsvSheetGenerator()) + .build(); + + writer.write( + UUID.fromString("f6767f7d-ce1e-45cc-92db-2ad3dfdd088e"), + new AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"foo\": 73}")) + .withEmittedAt(1234L)); + writer.write( + UUID.fromString("2b95a13f-d54f-4370-a712-1c7bf2716190"), + new AirbyteRecordMessage() + .withData(OBJECT_MAPPER.readTree("{\"bar\": 84}")) + .withEmittedAt(2345L)); + writer.close(false); + + // carriage returns are required b/c RFC4180 requires it :( + // Dynamically generate the timestamp because we generate in local time. + assertEquals( + String.format( + """ + f6767f7d-ce1e-45cc-92db-2ad3dfdd088e,"{""foo"":73}",%s\r + 2b95a13f-d54f-4370-a712-1c7bf2716190,"{""bar"":84}",%s\r + """, + Timestamp.from(Instant.ofEpochMilli(1234)), + Timestamp.from(Instant.ofEpochMilli(2345))), + outputStreams.get(0).toString(StandardCharsets.UTF_8)); + } + + /** + * This test really just wants to validate that: + *
<ul>
+ * <li>we're dumping into the correct directory (fake-bucketPath/fake_namespace/fake_stream) and
+ * that the filename contains the upload time</li>
+ * <li>each S3CsvWriter generates a unique filename suffix (the UUID) so that they don't overwrite
+ * each other</li>
+ * <li>we generate a .csv extension</li>
+ * </ul>
+ * So the UUID check isn't strictly necessary.
+ * <p>
+ * Eventually the output path generator should probably be injected into the S3CsvWriter (and we + * would test the generator directly + test that the writer calls the generator) + */ + private static void checkObjectName(final String objectName) { + final String errorMessage = "Object was actually " + objectName; + + assertTrue(objectName.startsWith(EXPECTED_OBJECT_BEGINNING), errorMessage); + assertTrue(objectName.endsWith(EXPECTED_OBJECT_ENDING), errorMessage); + + // Remove the beginning and ending, which _should_ leave us with just a UUID + final String uuidMaybe = objectName + // "^" == start of string + .replaceFirst("^" + EXPECTED_OBJECT_BEGINNING, "") + // "$" == end of string + .replaceFirst(EXPECTED_OBJECT_ENDING + "$", ""); + assertDoesNotThrow(() -> UUID.fromString(uuidMaybe), errorMessage); + } + +} diff --git a/airbyte-integrations/connectors/destination-scylla/Dockerfile b/airbyte-integrations/connectors/destination-scylla/Dockerfile index 2012d72996e4..eb5ce9d14ef7 100644 --- a/airbyte-integrations/connectors/destination-scylla/Dockerfile +++ b/airbyte-integrations/connectors/destination-scylla/Dockerfile @@ -3,9 +3,7 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-scylla -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.0 LABEL io.airbyte.name=airbyte/destination-scylla diff --git a/airbyte-integrations/connectors/destination-snowflake/Dockerfile b/airbyte-integrations/connectors/destination-snowflake/Dockerfile index 827233b3d5fd..797f5262b140 100644 --- a/airbyte-integrations/connectors/destination-snowflake/Dockerfile +++ b/airbyte-integrations/connectors/destination-snowflake/Dockerfile @@ -10,13 +10,11 @@ FROM airbyte/integration-base-java:dev WORKDIR /airbyte ENV APPLICATION destination-snowflake - # Needed for JDK17 (in turn, needed on M1 macs) - see https://github.com/snowflakedb/snowflake-jdbc/issues/589#issuecomment-983944767 ENV DESTINATION_SNOWFLAKE_OPTS "--add-opens java.base/java.nio=ALL-UNNAMED" -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar +ADD build/distributions/${APPLICATION}*.tar /airbyte -RUN tar xf ${APPLICATION}.tar --strip-components=1 -LABEL io.airbyte.version=0.3.20 +LABEL io.airbyte.version=0.3.21 LABEL io.airbyte.name=airbyte/destination-snowflake diff --git a/airbyte-integrations/connectors/destination-snowflake/build.gradle b/airbyte-integrations/connectors/destination-snowflake/build.gradle index c63d242b7d01..a991fefc5fc7 100644 --- a/airbyte-integrations/connectors/destination-snowflake/build.gradle +++ b/airbyte-integrations/connectors/destination-snowflake/build.gradle @@ -26,7 +26,7 @@ application { dependencies { implementation 'com.google.cloud:google-cloud-storage:1.113.16' implementation 'com.google.auth:google-auth-library-oauth2-http:0.25.5' - implementation 'net.snowflake:snowflake-jdbc:3.12.14' + implementation 'net.snowflake:snowflake-jdbc:3.13.9' implementation 'org.apache.commons:commons-csv:1.4' implementation 'com.github.alexmojaki:s3-stream-upload:2.2.2' diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.java index c484a78452af..a1548c162373 100644 --- 
a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.java @@ -42,6 +42,8 @@ public static Connection getConnection(final JsonNode config) throws SQLExceptio // https://docs.snowflake.com/en/user-guide/jdbc-parameters.html#application // identify airbyte traffic to snowflake to enable partnership & optimization opportunities properties.put("application", "airbyte"); + // Needed for JDK17 - see https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow + properties.put("JDBC_QUERY_RESULT_FORMAT", "JSON"); return DriverManager.getConnection(connectUrl, properties); } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java index bf32c15e8c2f..abbebb445338 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java @@ -10,16 +10,49 @@ import io.airbyte.integrations.base.AirbyteMessageConsumer; import io.airbyte.integrations.base.Destination; import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination; +import io.airbyte.protocol.models.AirbyteConnectionStatus; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.UUID; import java.util.function.Consumer; public class SnowflakeInternalStagingDestination extends AbstractJdbcDestination implements Destination { + private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeInternalStagingDestination.class); + public SnowflakeInternalStagingDestination() { super("", new SnowflakeSQLNameTransformer(), new SnowflakeStagingSqlOperations()); } + @Override + public AirbyteConnectionStatus check(JsonNode config) { + SnowflakeSQLNameTransformer nameTransformer = new SnowflakeSQLNameTransformer(); + SnowflakeStagingSqlOperations snowflakeStagingSqlOperations = new SnowflakeStagingSqlOperations(); + try (final JdbcDatabase database = getDatabase(config)) { + final String outputSchema = super.getNamingResolver().getIdentifier(config.get("schema").asText()); + attemptSQLCreateAndDropTableOperations(outputSchema, database, nameTransformer, snowflakeStagingSqlOperations); + attemptSQLCreateAndDropStages(outputSchema, database, nameTransformer, snowflakeStagingSqlOperations); + return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); + } catch (final Exception e) { + LOGGER.error("Exception while checking connection: ", e); + return new AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED) + .withMessage("Could not connect with provided configuration. 
\n" + e.getMessage()); + } + } + + private static void attemptSQLCreateAndDropStages(String outputSchema, JdbcDatabase database, SnowflakeSQLNameTransformer namingResolver, SnowflakeStagingSqlOperations sqlOperations) throws Exception { + + // verify we have permissions to create/drop stage + final String outputTableName = namingResolver.getIdentifier("_airbyte_connection_test_" + UUID.randomUUID().toString().replaceAll("-", "")); + String stageName = namingResolver.getStageName(outputSchema, outputTableName);; + sqlOperations.createStageIfNotExists(database, stageName); + sqlOperations.dropStageIfExists(database,stageName); + } + @Override protected JdbcDatabase getDatabase(final JsonNode config) { return SnowflakeDatabase.getDatabase(config); diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopier.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopier.java index 997d8838cee3..8893effcc2bd 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopier.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopier.java @@ -5,18 +5,17 @@ package io.airbyte.integrations.destination.snowflake; import com.amazonaws.services.s3.AmazonS3; -import io.airbyte.commons.string.Strings; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.destination.jdbc.SqlOperations; -import io.airbyte.integrations.destination.jdbc.copy.s3.S3StreamCopier; +import io.airbyte.integrations.destination.jdbc.copy.s3.LegacyS3StreamCopier; import io.airbyte.integrations.destination.s3.S3DestinationConfig; import io.airbyte.protocol.models.DestinationSyncMode; import java.sql.SQLException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class SnowflakeS3StreamCopier extends S3StreamCopier { +public class SnowflakeS3StreamCopier extends LegacyS3StreamCopier { private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeS3StreamCopier.class); private static final int FILE_PREFIX_LENGTH = 5; @@ -30,13 +29,11 @@ public SnowflakeS3StreamCopier(final String stagingFolder, final S3DestinationConfig s3Config, final ExtendedNameTransformer nameTransformer, final SqlOperations sqlOperations) { - super(stagingFolder, destSyncMode, schema, streamName, Strings.addRandomSuffix("", "", FILE_PREFIX_LENGTH) + "_" + streamName, - client, db, s3Config, nameTransformer, sqlOperations); + super(stagingFolder, destSyncMode, schema, streamName, client, db, s3Config, nameTransformer, sqlOperations); } @Override - public void copyS3CsvFileIntoTable( - final JdbcDatabase database, + public void copyS3CsvFileIntoTable(final JdbcDatabase database, final String s3FileLocation, final String schema, final String tableName, diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierFactory.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierFactory.java index dcf2697a9790..d2d9139af518 100644 --- 
a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierFactory.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierFactory.java @@ -9,11 +9,11 @@ import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.destination.jdbc.SqlOperations; import io.airbyte.integrations.destination.jdbc.copy.StreamCopier; -import io.airbyte.integrations.destination.jdbc.copy.s3.S3StreamCopierFactory; +import io.airbyte.integrations.destination.jdbc.copy.s3.LegacyS3StreamCopierFactory; import io.airbyte.integrations.destination.s3.S3DestinationConfig; import io.airbyte.protocol.models.DestinationSyncMode; -public class SnowflakeS3StreamCopierFactory extends S3StreamCopierFactory { +public class SnowflakeS3StreamCopierFactory extends LegacyS3StreamCopierFactory { @Override public StreamCopier create(final String stagingFolder, diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/Dockerfile b/airbyte-integrations/connectors/source-amazon-seller-partner/Dockerfile index 3f8901bb75af..e90692d9433a 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/Dockerfile +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.5 +LABEL io.airbyte.version=0.2.6 LABEL io.airbyte.name=airbyte/source-amazon-seller-partner diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-amazon-seller-partner/integration_tests/configured_catalog.json index 5c897b44e0b2..0387865e646d 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/integration_tests/configured_catalog.json @@ -104,6 +104,15 @@ "sync_mode": "incremental", "destination_sync_mode": "append", "cursor_field": ["Date"] + }, + { + "stream": { + "name": "GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" } ] } diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-amazon-seller-partner/integration_tests/sample_state.json index afedf14fa041..bedd80958e13 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/integration_tests/sample_state.json +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/integration_tests/sample_state.json @@ -28,5 +28,8 @@ }, "GET_SELLER_FEEDBACK_DATA": { "createdTime": "2021-07-01T00:00:00Z" + }, + "GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT": { + "createdTime": "2021-07-01T00:00:00Z" } } diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT.json b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT.json new file mode 100644 index 000000000000..12ca9e762138 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT.json @@ -0,0 +1,29 @@ +{ + "title": "Brand Analytics Search Terms Reports", + "description": "Brand Analytics Search Terms Reports", + "type": "object", + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "departmentName": { + "type": ["null", "string"] + }, + "searchTerm": { + "type": ["null", "string"] + }, + "searchFrequencyRank": { + "type": ["null", "number"] + }, + "clickedAsin": { + "type": ["null", "string"] + }, + "clickShareRank": { + "type": ["null", "number"] + }, + "clickShare": { + "type": ["null", "number"] + }, + "conversionShare": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/source.py b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/source.py index 72c119193bb8..0221c75cb14d 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/source.py +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/source.py @@ -15,6 +15,7 @@ from source_amazon_seller_partner.auth import AWSAuthenticator, AWSSignature from source_amazon_seller_partner.constants import AWSEnvironment, AWSRegion, get_marketplaces from source_amazon_seller_partner.streams import ( + BrandAnalyticsSearchTermsReports, FbaInventoryReports, FbaOrdersReports, FbaShipmentsReports, @@ -44,6 +45,11 @@ class Config: description="Will be used for stream slicing for initial full_refresh sync when no updated state is present for reports that support sliced incremental sync.", examples=["30", "365"], ) + report_options: str = Field( + None, + description="Additional information passed to reports. This varies by report type. Must be a valid json string.", + examples=['{"GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT": {"reportPeriod": "WEEK"}}', '{"GET_SOME_REPORT": {"custom": "true"}}'], + ) refresh_token: str = Field( description="The Refresh Token obtained via OAuth flow authorization.", title="Refresh Token", @@ -98,6 +104,7 @@ def _get_stream_kwargs(self, config: ConnectorConfig) -> Mapping[str, Any]: "replication_start_date": config.replication_start_date, "marketplace_ids": [marketplace_id], "period_in_days": config.period_in_days, + "report_options": config.report_options, } return stream_kwargs @@ -136,6 +143,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: VendorInventoryHealthReports(**stream_kwargs), Orders(**stream_kwargs), SellerFeedbackReports(**stream_kwargs), + BrandAnalyticsSearchTermsReports(**stream_kwargs), ] def spec(self, *args, **kwargs) -> ConnectorSpecification: diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/spec.json b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/spec.json index c40da80387e0..34c23685b239 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/spec.json +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/spec.json @@ -19,6 +19,15 @@ "type": "integer", "default": 30 }, + "report_options": { + "title": "Report Options", + "description": "Additional information passed to reports. This varies by report type. 
Must be a valid json string.", + "examples": [ + "{\"GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT\": {\"reportPeriod\": \"WEEK\"}}", + "{\"GET_SOME_REPORT\": {\"custom\": \"true\"}}" + ], + "type": "string" + }, "refresh_token": { "title": "Refresh Token", "description": "The Refresh Token obtained via OAuth flow authorization.", diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py index c2c83da1f793..0dff4b756910 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py @@ -46,6 +46,7 @@ def __init__( replication_start_date: str, marketplace_ids: List[str], period_in_days: Optional[int], + report_options: Optional[str], *args, **kwargs, ): @@ -151,6 +152,7 @@ def __init__( replication_start_date: str, marketplace_ids: List[str], period_in_days: Optional[int], + report_options: Optional[str], authenticator: HttpAuthenticator = NoAuth(), ): self._authenticator = authenticator @@ -160,6 +162,7 @@ def __init__( self._replication_start_date = replication_start_date self.marketplace_ids = marketplace_ids self.period_in_days = period_in_days + self._report_options = report_options @property def url_base(self) -> str: @@ -282,9 +285,16 @@ def parse_response(self, response: requests.Response) -> Iterable[Mapping]: payload, ) - document_records = csv.DictReader(StringIO(document), delimiter="\t") + document_records = self.parse_document(document) yield from document_records + @staticmethod + def parse_document(document): + return csv.DictReader(StringIO(document), delimiter="\t") + + def report_options(self) -> Optional[Mapping[str, Any]]: + return json_lib.loads(self._report_options).get(self.name) if self._report_options else None + def read_records( self, sync_mode: SyncMode, @@ -378,6 +388,70 @@ class VendorInventoryHealthReports(ReportsAmazonSPStream): name = "GET_VENDOR_INVENTORY_HEALTH_AND_PLANNING_REPORT" +class BrandAnalyticsSearchTermsReports(ReportsAmazonSPStream): + """ + Field definitions: https://sellercentral.amazon.co.uk/help/hub/reference/G5NXWNY8HUD3VDCW + """ + + name = "GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT" + + @staticmethod + def parse_document(document): + parsed = json_lib.loads(document) + return parsed.get("dataByDepartmentAndSearchTerm", {}) + + def _report_data( + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ) -> Mapping[str, Any]: + data = super()._report_data(sync_mode, cursor_field, stream_slice, stream_state) + options = self.report_options() + if options is not None: + data.update(self._augmented_data(options)) + + return data + + @staticmethod + def _augmented_data(report_options) -> Mapping[str, Any]: + if report_options.get("reportPeriod") is None: + return {} + else: + now = pendulum.now("utc") + if report_options["reportPeriod"] == "DAY": + now = now.subtract(days=1) + data_start_time = now.start_of("day") + data_end_time = now.end_of("day") + elif report_options["reportPeriod"] == "WEEK": + now = now.subtract(weeks=1) + + # According to the report API docs, + # dataStartTime must be a Sunday and dataEndTime must be the following Saturday + pendulum.week_starts_at(pendulum.SUNDAY) + pendulum.week_ends_at(pendulum.SATURDAY) + + data_start_time = now.start_of("week") + data_end_time = 
now.end_of("week") + + # Reset week start and end + pendulum.week_starts_at(pendulum.MONDAY) + pendulum.week_ends_at(pendulum.SUNDAY) + elif report_options["reportPeriod"] == "MONTH": + now = now.subtract(months=1) + data_start_time = now.start_of("month") + data_end_time = now.end_of("month") + else: + raise Exception([{"message": "This reportPeriod is not implemented."}]) + + return { + "dataStartTime": data_start_time.strftime(DATE_TIME_FORMAT), + "dataEndTime": data_end_time.strftime(DATE_TIME_FORMAT), + "reportOptions": report_options, + } + + class IncrementalReportsAmazonSPStream(ReportsAmazonSPStream): @property @abstractmethod diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/test_repots_streams_rate_limits.py b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/test_repots_streams_rate_limits.py index a258b226180f..0aa1d5ec9ab0 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/test_repots_streams_rate_limits.py +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/test_repots_streams_rate_limits.py @@ -28,6 +28,7 @@ def reports_stream(): marketplace_ids=["id"], authenticator=NoAuth(), period_in_days=0, + report_options=None, ) return stream diff --git a/airbyte-integrations/connectors/source-bigquery/Dockerfile b/airbyte-integrations/connectors/source-bigquery/Dockerfile index fbbb5db29ab4..19da59e7fae4 100644 --- a/airbyte-integrations/connectors/source-bigquery/Dockerfile +++ b/airbyte-integrations/connectors/source-bigquery/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-bigquery -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte # Airbyte's build system uses these labels to know what to name and tag the docker images produced by this Dockerfile. LABEL io.airbyte.version=0.1.4 diff --git a/airbyte-integrations/connectors/source-bing-ads/Dockerfile b/airbyte-integrations/connectors/source-bing-ads/Dockerfile index 77896c91a989..57d8f148428a 100644 --- a/airbyte-integrations/connectors/source-bing-ads/Dockerfile +++ b/airbyte-integrations/connectors/source-bing-ads/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . 
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/source-bing-ads diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/spec.json b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/spec.json index dc6213a6d9a7..5821a42de682 100644 --- a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/spec.json +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/spec.json @@ -23,7 +23,7 @@ "accounts": { "title": "Accounts", "type": "object", - "description": "Account selection strategy.", + "description": "Account selection.", "oneOf": [ { "title": "All accounts assigned to your user", @@ -41,7 +41,7 @@ { "title": "Subset of your accounts", "additionalProperties": false, - "description": "Fetch data for subset of account ids.", + "description": "Fetch data for a subset of account IDs.", "required": ["ids", "selection_strategy"], "properties": { "selection_strategy": { @@ -51,6 +51,7 @@ }, "ids": { "type": "array", + "title": "IDs", "description": "List of accounts from which data will be fetched.", "items": { "type": "string" @@ -64,37 +65,44 @@ }, "client_id": { "type": "string", - "description": "ID of your Microsoft Advertising client application.", + "title": "Client ID", + "description": "The Client ID of your Microsoft Advertising developer application.", "airbyte_secret": true }, "client_secret": { "type": "string", - "description": "Secret of your Microsoft Advertising client application.", + "title": "Client Secret", + "description": "The Client Secret of your Microsoft Advertising developer application.", "airbyte_secret": true }, "customer_id": { "type": "string", - "description": "User's customer ID." + "title": "Customer ID", + "description": "User's Customer ID. Go to your Accounts and Billing page. Your Customer ID will be listed on the Accounts tab under the heading Customer." }, "developer_token": { "type": "string", + "title": "Developer Token", "description": "Developer token associated with user.", "airbyte_secret": true }, "refresh_token": { "type": "string", - "description": "The long-lived Refresh token received via grant_type=refresh_token request.", + "title": "Refresh Token", + "description": "Refresh Token to renew the expired Access Token.", "airbyte_secret": true }, "user_id": { "type": "string", - "description": "Unique user identifier." + "title": "Account ID", + "description": "Bing Ads Account ID. You can find your Account ID by going to your profile and selecting Accounts and Billing." }, "reports_start_date": { "type": "string", + "title": "Reports Start Date", "format": "date", "default": "2020-01-01", - "description": "From which date perform initial sync for report related streams. In YYYY-MM-DD format" + "description": "UTC date in YYYY-MM-DD format. Any reports before this date will not be replicated."
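For illustration, a hypothetical config payload that would satisfy the revised spec above, using the subset-of-accounts branch. Every value is a placeholder, and the "subset" selection_strategy constant is an assumption, since that property's definition is not visible in this hunk.

sample_config = {
    "accounts": {
        "selection_strategy": "subset",  # assumed constant; not shown in this hunk
        "ids": ["151049662", "151049663"],  # made-up account IDs
    },
    "client_id": "<client-id>",
    "client_secret": "<client-secret>",
    "customer_id": "<customer-id>",
    "developer_token": "<developer-token>",
    "refresh_token": "<refresh-token>",
    "user_id": "<account-id>",
    "reports_start_date": "2020-01-01",
}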
}, "hourly_reports": { "title": "Hourly reports", diff --git a/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/Dockerfile index acbfdee8c378..e38a1a377d0c 100644 --- a/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-clickhouse-strict-encrypt/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-clickhouse-strict-encrypt -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/source-clickhouse-strict-encrypt diff --git a/airbyte-integrations/connectors/source-clickhouse/Dockerfile b/airbyte-integrations/connectors/source-clickhouse/Dockerfile index 6cdd5a98bda9..0598cd527acd 100644 --- a/airbyte-integrations/connectors/source-clickhouse/Dockerfile +++ b/airbyte-integrations/connectors/source-clickhouse/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-clickhouse -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar +ADD build/distributions/${APPLICATION}*.tar /airbyte -RUN tar xf ${APPLICATION}.tar --strip-components=1 - -LABEL io.airbyte.version=0.1.5 +LABEL io.airbyte.version=0.1.6 LABEL io.airbyte.name=airbyte/source-clickhouse diff --git a/airbyte-integrations/connectors/source-clickhouse/src/main/resources/spec.json b/airbyte-integrations/connectors/source-clickhouse/src/main/resources/spec.json index 6d9250a966d8..ced34efdfbf5 100644 --- a/airbyte-integrations/connectors/source-clickhouse/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-clickhouse/src/main/resources/spec.json @@ -8,11 +8,13 @@ "additionalProperties": false, "properties": { "host": { - "description": "Host Endpoint of the Clickhouse Cluster", + "description": "The host endpoint of the Clickhouse cluster.", + "title": "Host", "type": "string" }, "port": { - "description": "Port of the database.", + "description": "The port of the database.", + "title": "Port", "type": "integer", "minimum": 0, "maximum": 65536, @@ -20,16 +22,19 @@ "examples": ["8123"] }, "database": { - "description": "Name of the database.", + "description": "The name of the database.", + "title": "Database", "type": "string", "examples": ["default"] }, "username": { - "description": "Username to use to access the database.", + "description": "The username which is used to access the database.", + "title": "Username", "type": "string" }, "password": { - "description": "Password associated with the username.", + "description": "The password associated with this username.", + "title": "Password", "type": "string", "airbyte_secret": true }, diff --git a/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile index f97162c92482..b7bbd8756b1b 100644 --- a/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-cockroachdb-strict-encrypt -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.0 LABEL io.airbyte.name=airbyte/source-cockroachdb-strict-encrypt 
\ No newline at end of file diff --git a/airbyte-integrations/connectors/source-cockroachdb/Dockerfile b/airbyte-integrations/connectors/source-cockroachdb/Dockerfile index e7fa7e0b9010..01c8731e4d85 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/Dockerfile +++ b/airbyte-integrations/connectors/source-cockroachdb/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-cockroachdb -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.3 LABEL io.airbyte.name=airbyte/source-cockroachdb \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-db2-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-db2-strict-encrypt/Dockerfile index 21e649ceebc2..8ad59bc0dd4d 100644 --- a/airbyte-integrations/connectors/source-db2-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-db2-strict-encrypt/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-db2-strict-encrypt -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.0 LABEL io.airbyte.name=airbyte/source-db2-strict-encrypt diff --git a/airbyte-integrations/connectors/source-db2/Dockerfile b/airbyte-integrations/connectors/source-db2/Dockerfile index e27e2ec0b5f1..71efa073e358 100644 --- a/airbyte-integrations/connectors/source-db2/Dockerfile +++ b/airbyte-integrations/connectors/source-db2/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-db2 -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.3 LABEL io.airbyte.name=airbyte/source-db2 diff --git a/airbyte-integrations/connectors/source-drift/Dockerfile b/airbyte-integrations/connectors/source-drift/Dockerfile index af63be627b86..7488ae6fdf65 100644 --- a/airbyte-integrations/connectors/source-drift/Dockerfile +++ b/airbyte-integrations/connectors/source-drift/Dockerfile @@ -34,5 +34,5 @@ COPY source_drift ./source_drift ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.4 +LABEL io.airbyte.version=0.2.5 LABEL io.airbyte.name=airbyte/source-drift diff --git a/airbyte-integrations/connectors/source-drift/source_drift/spec.json b/airbyte-integrations/connectors/source-drift/source_drift/spec.json index ce9cb7b7096b..3b1ad1f39f9c 100644 --- a/airbyte-integrations/connectors/source-drift/source_drift/spec.json +++ b/airbyte-integrations/connectors/source-drift/source_drift/spec.json @@ -49,7 +49,7 @@ "refresh_token": { "type": "string", "title": "Refresh Token", - "description": "Refresh Token to renew the expired access_token.", + "description": "Refresh Token to renew the expired Access Token.", "default": "", "airbyte_secret": true } diff --git a/airbyte-integrations/connectors/source-e2e-test/Dockerfile b/airbyte-integrations/connectors/source-e2e-test/Dockerfile index 7fd2c3e8e991..76e4664936b0 100644 --- a/airbyte-integrations/connectors/source-e2e-test/Dockerfile +++ b/airbyte-integrations/connectors/source-e2e-test/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-e2e-test -COPY build/distributions/${APPLICATION}*.tar 
${APPLICATION}.tar +ADD build/distributions/${APPLICATION}*.tar /airbyte -RUN tar xf ${APPLICATION}.tar --strip-components=1 - -LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.version=0.1.1 LABEL io.airbyte.name=airbyte/source-e2e-test diff --git a/airbyte-integrations/connectors/source-e2e-test/src/main/java/io/airbyte/integrations/source/e2e_test/InfiniteFeedSource.java b/airbyte-integrations/connectors/source-e2e-test/src/main/java/io/airbyte/integrations/source/e2e_test/InfiniteFeedSource.java index f93f91c8fc52..99fe7d5cf552 100644 --- a/airbyte-integrations/connectors/source-e2e-test/src/main/java/io/airbyte/integrations/source/e2e_test/InfiniteFeedSource.java +++ b/airbyte-integrations/connectors/source-e2e-test/src/main/java/io/airbyte/integrations/source/e2e_test/InfiniteFeedSource.java @@ -25,6 +25,7 @@ import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaPrimitive; import java.time.Instant; +import java.util.Optional; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Predicate; import org.slf4j.Logger; @@ -53,7 +54,7 @@ public AutoCloseableIterator read(final JsonNode config, final C final Predicate anotherRecordPredicate = config.has("max_records") ? recordNumber -> recordNumber < config.get("max_records").asLong() : recordNumber -> true; - final long sleepTime = config.has("message_interval") ? config.get("message_interval").asLong() : 3000L; + final Optional sleepTime = Optional.ofNullable(config.get("message_interval")).map(JsonNode::asLong); final AtomicLong i = new AtomicLong(); @@ -63,11 +64,13 @@ public AutoCloseableIterator read(final JsonNode config, final C protected AirbyteMessage computeNext() { if (anotherRecordPredicate.test(i.get())) { if (i.get() != 0) { - try { - LOGGER.info("sleeping for {} ms", sleepTime); - sleep(sleepTime); - } catch (final InterruptedException e) { - throw new RuntimeException(); + if (sleepTime.isPresent()) { + try { + LOGGER.info("sleeping for {} ms", sleepTime.get()); + sleep(sleepTime.get()); + } catch (final InterruptedException e) { + throw new RuntimeException(e); + } } } i.incrementAndGet(); diff --git a/airbyte-integrations/connectors/source-e2e-test/src/main/resources/spec.json b/airbyte-integrations/connectors/source-e2e-test/src/main/resources/spec.json index 88729274d7aa..860e07cae9d7 100644 --- a/airbyte-integrations/connectors/source-e2e-test/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-e2e-test/src/main/resources/spec.json @@ -24,8 +24,8 @@ }, { "title": "Infinite Feed", - "required": ["type", "max_records", "message_interval"], - "additionalProperties": false, + "required": ["type", "max_records"], + "additionalProperties": true, "properties": { "type": { "type": "string", diff --git a/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile b/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile index 87c2dd28e8d9..24f71275ff24 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile +++ b/airbyte-integrations/connectors/source-facebook-marketing/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . 
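With the spec change above, message_interval is no longer required, so both of the following configs should validate; the "INFINITE_FEED" type constant is an assumption, as its definition is not shown in this hunk.

config_fast = {"type": "INFINITE_FEED", "max_records": 100}  # no sleep between records
config_throttled = {"type": "INFINITE_FEED", "max_records": 100, "message_interval": 1000}  # 1000 ms between records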
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.26 +LABEL io.airbyte.version=0.2.29 LABEL io.airbyte.name=airbyte/source-facebook-marketing diff --git a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json index 2c6b7bbcd205..1f4e623f6368 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json @@ -32,9 +32,15 @@ "type": "string", "format": "date-time" }, + "fetch_thumbnail_images": { + "title": "Fetch Thumbnail Images", + "description": "In each Ad Creative, fetch the thumbnail_url and store the result in thumbnail_data_url", + "default": false, + "type": "boolean" + }, "include_deleted": { "title": "Include Deleted", - "description": "Include data from deleted campaigns, ads, and adsets.", + "description": "Include data from deleted campaigns, ads, and adsets", "default": false, "type": "boolean" }, @@ -48,7 +54,7 @@ }, "insights_days_per_job": { "title": "Insights Days Per Job", - "description": "Number of days to sync in one job. The more data you have - the smaller you want this parameter to be.", + "description": "Number of days to sync in one job (the more data you have, the smaller this parameter should be)", "default": 7, "minimum": 1, "maximum": 30, diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/api.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/api.py index 1858c86f77cd..9ee3d73c4f8a 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/api.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/api.py @@ -50,8 +50,16 @@ def parse_call_rate_header(headers): usage_header_business_loaded = json.loads(usage_header_business) for business_object_id in usage_header_business_loaded: usage_limits = usage_header_business_loaded.get(business_object_id)[0] - usage = max(usage, usage_limits.get("call_count"), usage_limits.get("total_cputime"), usage_limits.get("total_time")) - pause_interval = max(pause_interval, pendulum.duration(minutes=usage_limits.get("estimated_time_to_regain_access", 0))) + usage = max( + usage, + usage_limits.get("call_count"), + usage_limits.get("total_cputime"), + usage_limits.get("total_time"), + ) + pause_interval = max( + pause_interval, + pendulum.duration(minutes=usage_limits.get("estimated_time_to_regain_access", 0)), + ) return usage, pause_interval diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_creatives.json b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_creatives.json index eb443dab21a7..a33dfd4c58b7 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_creatives.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ad_creatives.json @@ -835,6 +835,9 @@ } } }, + "thumbnail_data_url": { + "type": ["null", "string"] + }, "thumbnail_url": { "type": ["null", "string"] }, diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights_action_breakdowns.json 
b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights_action_breakdowns.json new file mode 100644 index 000000000000..ac976913bc5d --- /dev/null +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights_action_breakdowns.json @@ -0,0 +1,14 @@ +{ + "properties": { + "action_device": { "type": ["null", "string"] }, + "action_canvas_component_name": { "type": ["null", "string"] }, + "action_carousel_card_id": { "type": ["null", "string"] }, + "action_carousel_card_name": { "type": ["null", "string"] }, + "action_destination": { "type": ["null", "string"] }, + "action_reaction": { "type": ["null", "string"] }, + "action_target_id": { "type": ["null", "string"] }, + "action_type": { "type": ["null", "string"] }, + "action_video_sound": { "type": ["null", "string"] }, + "action_video_type": { "type": ["null", "string"] } + } +} diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights_breakdowns.json b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights_breakdowns.json index 87d31b9854b9..cb8824fbf5e1 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights_breakdowns.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights_breakdowns.json @@ -1,15 +1,5 @@ { "properties": { - "action_device": { "type": ["null", "string"] }, - "action_canvas_component_name": { "type": ["null", "string"] }, - "action_carousel_card_id": { "type": ["null", "string"] }, - "action_carousel_card_name": { "type": ["null", "string"] }, - "action_destination": { "type": ["null", "string"] }, - "action_reaction": { "type": ["null", "string"] }, - "action_target_id": { "type": ["null", "string"] }, - "action_type": { "type": ["null", "string"] }, - "action_video_sound": { "type": ["null", "string"] }, - "action_video_type": { "type": ["null", "string"] }, "ad_format_asset": { "type": ["null", "string"] }, "age": { "type": ["null", "string"] }, "app_id": { "type": ["null", "string"] }, diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/campaigns.json b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/campaigns.json index 5b3b8a89dff9..e0a2d8c0c33b 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/campaigns.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/campaigns.json @@ -1,55 +1,116 @@ { "properties": { - "name": { + "account_id": { "type": ["null", "string"] }, - "objective": { - "type": ["null", "string"] + "adlabels": { + "type": ["null", "array"], + "items": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "created_time": { + "type": "string", + "format": "date-time" + }, + "updated_time": { + "type": "string", + "format": "date-time" + } + } + } }, - "id": { + "bid_strategy": { "type": ["null", "string"] }, - "account_id": { - "type": ["null", "string"] + "budget_rebalance_flag": { + "type": ["null", "boolean"] }, - "effective_status": { - "type": ["null", "string"] + "budget_remaining": { + "type": ["null", "number"] }, "buying_type": { "type": ["null", "string"] }, - "spend_cap": { + "daily_budget": { "type": ["null", 
"number"] }, - "start_time": { + "created_time": { "type": "string", "format": "date-time" }, - "updated_time": { - "type": "string", - "format": "date-time" + "effective_status": { + "type": ["null", "string"] }, - "adlabels": { + "id": { + "type": ["null", "string"] + }, + "issues_info": { "type": ["null", "array"], "items": { "type": "object", "properties": { - "id": { + "error_code": { "type": "string" }, - "name": { + "error_message": { "type": "string" }, - "created_time": { - "type": "string", - "format": "date-time" + "error_summary": { + "type": "string" }, - "updated_time": { - "type": "string", - "format": "date-time" + "error_type": { + "type": "string" + }, + "level": { + "type": "string" } } } + }, + "lifetime_budget": { + "type": ["null", "number"] + }, + "name": { + "type": ["null", "string"] + }, + "objective": { + "type": ["null", "string"] + }, + "smart_promotion_type": { + "type": ["null", "string"] + }, + "source_campaign_id": { + "type": ["null", "number"] + }, + "special_ad_category": { + "type": ["null", "string"] + }, + "special_ad_category_country": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "spend_cap": { + "type": ["null", "number"] + }, + "start_time": { + "type": "string", + "format": "date-time" + }, + "stop_time": { + "type": "string", + "format": "date-time" + }, + "updated_time": { + "type": "string", + "format": "date-time" } }, "type": ["null", "object"] diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/source.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/source.py index 0150bd0ec475..72f1328a7151 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/source.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/source.py @@ -76,7 +76,11 @@ class Config: default_factory=pendulum.now, ) - include_deleted: bool = Field(default=False, description="Include data from deleted campaigns, ads, and adsets.") + fetch_thumbnail_images: bool = Field( + default=False, description="In each Ad Creative, fetch the thumbnail_url and store the result in thumbnail_data_url" + ) + + include_deleted: bool = Field(default=False, description="Include data from deleted campaigns, ads, and adsets") insights_lookback_window: int = Field( default=28, @@ -87,7 +91,7 @@ class Config: insights_days_per_job: int = Field( default=7, - description="Number of days to sync in one job. 
The more data you have - the smaller you want this parameter to be.", + description="Number of days to sync in one job (the more data you have, the smaller this parameter should be)", minimum=1, maximum=30, ) @@ -137,7 +141,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Type[Stream]]: Campaigns(api=api, start_date=config.start_date, end_date=config.end_date, include_deleted=config.include_deleted), AdSets(api=api, start_date=config.start_date, end_date=config.end_date, include_deleted=config.include_deleted), Ads(api=api, start_date=config.start_date, end_date=config.end_date, include_deleted=config.include_deleted), - AdCreatives(api=api), + AdCreatives(api=api, fetch_thumbnail_images=config.fetch_thumbnail_images), AdsInsights(**insights_args), AdsInsightsAgeAndGender(**insights_args), AdsInsightsCountry(**insights_args), @@ -204,13 +208,10 @@ def _update_insights_streams(self, insights, args, streams) -> List[Type[Stream] def _check_custom_insights_entries(self, insights: List[Mapping[str, Any]]): - default_fields = list( - ResourceSchemaLoader(package_name_from_class(self.__class__)).get_schema("ads_insights").get("properties", {}).keys() - ) - default_breakdowns = list( - ResourceSchemaLoader(package_name_from_class(self.__class__)).get_schema("ads_insights_breakdowns").get("properties", {}).keys() - ) - default_actions_breakdowns = [e for e in default_breakdowns if "action_" in e] + loader = ResourceSchemaLoader(package_name_from_class(self.__class__)) + default_fields = list(loader.get_schema("ads_insights").get("properties", {}).keys()) + default_breakdowns = list(loader.get_schema("ads_insights_breakdowns").get("properties", {}).keys()) + default_action_breakdowns = list(loader.get_schema("ads_insights_action_breakdowns").get("properties", {}).keys()) for insight in insights: if insight.get("fields"): @@ -224,7 +225,7 @@ def _check_custom_insights_entries(self, insights: List[Mapping[str, Any]]): message = f"{value} is not a valid breakdown name" raise Exception("Config validation error: " + message) from None if insight.get("action_breakdowns"): - value_checked, value = self._check_values(default_actions_breakdowns, insight.get("action_breakdowns")) + value_checked, value = self._check_values(default_action_breakdowns, insight.get("action_breakdowns")) if not value_checked: message = f"{value} is not a valid action_breakdown name" raise Exception("Config validation error: " + message) from None diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams.py index c0d236a80e70..fe0013a8966e 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams.py @@ -2,6 +2,7 @@ # Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
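A condensed sketch of the validation flow refactored above: the valid field and breakdown names are derived from the bundled JSON schema files, and each custom insights entry is checked against them. The schema dictionaries below are stand-ins, not the connector's real schemas, so the last loop raises because "action_reaction" is missing from the stand-in.

def property_names(schema: dict) -> list:
    """Collect the property names a schema file exposes."""
    return list(schema.get("properties", {}).keys())

# stand-in schemas; the connector loads these from the bundled *.json files
ads_insights_schema = {"properties": {"impressions": {}, "clicks": {}, "spend": {}}}
action_breakdowns_schema = {"properties": {"action_type": {}, "action_device": {}}}

default_fields = property_names(ads_insights_schema)
default_action_breakdowns = property_names(action_breakdowns_schema)

custom_insight = {"fields": ["clicks"], "action_breakdowns": ["action_reaction"]}
for value in custom_insight.get("fields", []):
    if value not in default_fields:
        raise Exception(f"Config validation error: {value} is not a valid field name")
for value in custom_insight.get("action_breakdowns", []):
    if value not in default_action_breakdowns:
        raise Exception(f"Config validation error: {value} is not a valid action_breakdown name")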
# +import base64 import time import urllib.parse as urlparse from abc import ABC @@ -12,6 +13,7 @@ import airbyte_cdk.sources.utils.casing as casing import backoff import pendulum +import requests from airbyte_cdk.models import SyncMode from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.core import package_name_from_class @@ -43,6 +45,18 @@ def remove_params_from_url(url: str, params: List[str]) -> str: ) +def fetch_thumbnail_data_url(url: str) -> str: + try: + response = requests.get(url) + if response.status_code == 200: + type = response.headers["content-type"] + data = base64.b64encode(response.content) + return f"data:{type};base64,{data.decode('ascii')}" + except requests.exceptions.RequestException: + pass + return None + + class FBMarketingStream(Stream, ABC): """Base stream class""" @@ -198,6 +212,10 @@ class AdCreatives(FBMarketingStream): entity_prefix = "adcreative" batch_size = 50 + def __init__(self, fetch_thumbnail_images: bool = False, **kwargs): + super().__init__(**kwargs) + self._fetch_thumbnail_images = fetch_thumbnail_images + def read_records( self, sync_mode: SyncMode, @@ -207,17 +225,23 @@ def read_records( ) -> Iterable[Mapping[str, Any]]: """Read records using batch API""" records = self._read_records(params=self.request_params(stream_state=stream_state)) - requests = [record.api_get(fields=self.fields, pending=True) for record in records] + # "thumbnail_data_url" is a field in our stream's schema because we + # output it (see fix_thumbnail_urls below), but it's not a field that + # we can request from Facebook + request_fields = [f for f in self.fields if f != "thumbnail_data_url"] + requests = [record.api_get(fields=request_fields, pending=True) for record in records] for requests_batch in batch(requests, size=self.batch_size): for record in self.execute_in_batch(requests_batch): - yield self.clear_urls(record) + yield self.fix_thumbnail_urls(record) - @staticmethod - def clear_urls(record: MutableMapping[str, Any]) -> MutableMapping[str, Any]: - """Some URLs has random values, these values doesn't affect validity of URLs, but breaks SAT""" + def fix_thumbnail_urls(self, record: MutableMapping[str, Any]) -> MutableMapping[str, Any]: + """Cleans and, if enabled, fetches thumbnail URLs for each creative.""" + # The thumbnail_url contains some extra query parameters that don't affect the validity of the URL, but break SAT thumbnail_url = record.get("thumbnail_url") if thumbnail_url: record["thumbnail_url"] = remove_params_from_url(thumbnail_url, ["_nc_hash", "d"]) + if self._fetch_thumbnail_images: + record["thumbnail_data_url"] = fetch_thumbnail_data_url(thumbnail_url) return record @backoff_policy @@ -402,10 +426,13 @@ def get_json_schema(self) -> Mapping[str, Any]: """Add fields from breakdowns to the stream schema :return: A dict of the JSON schema representing this stream. 
""" - schema = ResourceSchemaLoader(package_name_from_class(self.__class__)).get_schema("ads_insights") + loader = ResourceSchemaLoader(package_name_from_class(self.__class__)) + schema = loader.get_schema("ads_insights") if self._fields: schema["properties"] = {k: v for k, v in schema["properties"].items() if k in self._fields} - schema["properties"].update(self._schema_for_breakdowns()) + if self.breakdowns: + breakdowns_properties = loader.get_schema("ads_insights_breakdowns")["properties"] + schema["properties"].update({prop: breakdowns_properties[prop] for prop in self.breakdowns}) return schema @cached_property @@ -416,25 +443,6 @@ def fields(self) -> List[str]: schema = ResourceSchemaLoader(package_name_from_class(self.__class__)).get_schema("ads_insights") return list(schema.get("properties", {}).keys()) - def _schema_for_breakdowns(self) -> Mapping[str, Any]: - """Breakdown fields and their type""" - schemas = { - "age": {"type": ["null", "integer", "string"]}, - "gender": {"type": ["null", "string"]}, - "country": {"type": ["null", "string"]}, - "dma": {"type": ["null", "string"]}, - "region": {"type": ["null", "string"]}, - "impression_device": {"type": ["null", "string"]}, - "placement": {"type": ["null", "string"]}, - "platform_position": {"type": ["null", "string"]}, - "publisher_platform": {"type": ["null", "string"]}, - } - breakdowns = self.breakdowns[:] - if "platform_position" in breakdowns: - breakdowns.append("placement") - - return {breakdown: schemas[breakdown] for breakdown in self.breakdowns} - def _date_ranges(self, stream_state: Mapping[str, Any]) -> Iterator[dict]: """Iterate over period between start_date/state and now diff --git a/airbyte-integrations/connectors/source-file-secure/setup.py b/airbyte-integrations/connectors/source-file-secure/setup.py index 7345e937a1a5..7ec39455292f 100644 --- a/airbyte-integrations/connectors/source-file-secure/setup.py +++ b/airbyte-integrations/connectors/source-file-secure/setup.py @@ -14,7 +14,7 @@ "paramiko==2.7.2", "s3fs==0.4.2", "smart-open[all]==4.1.2", - "lxml==4.6.3", + "lxml==4.6.5", "html5lib==1.1", "beautifulsoup4==4.9.3", "pyarrow==3.0.0", diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/Dockerfile b/airbyte-integrations/connectors/source-google-analytics-v4/Dockerfile index a0341e435427..f98cdc190879 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/Dockerfile +++ b/airbyte-integrations/connectors/source-google-analytics-v4/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . 
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.13 +LABEL io.airbyte.version=0.1.14 LABEL io.airbyte.name=airbyte/source-google-analytics-v4 diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-analytics-v4/acceptance-test-config.yml index cb2c1801c4fe..a49605332727 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-google-analytics-v4/acceptance-test-config.yml @@ -22,6 +22,8 @@ tests: - config_path: "secrets/service_config.json" configured_catalog_path: "integration_tests/configured_catalog.json" empty_streams: [] + expect_records: + path: "integration_tests/expected_records.txt" incremental: - config_path: "secrets/service_config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-google-analytics-v4/integration_tests/expected_records.txt new file mode 100644 index 000000000000..3701178cfbe6 --- /dev/null +++ b/airbyte-integrations/connectors/source-google-analytics-v4/integration_tests/expected_records.txt @@ -0,0 +1,6 @@ +{"stream": "new_users_per_day", "data": {"ga_date": "2021-12-10", "ga_country": "United States", "ga_region": "Washington", "ga_newUsers": 1, "view_id": "211669975"}, "emitted_at": 1639563255199} +{"stream": "devices", "data": {"ga_date": "2021-12-10", "ga_deviceCategory": "desktop", "ga_operatingSystem": "Macintosh", "ga_browser": "Firefox", "ga_users": 1, "ga_newUsers": 1, "ga_sessions": 1, "ga_sessionsPerUser": 1.0, "ga_avgSessionDuration": 0.0, "ga_pageviews": 1, "ga_pageviewsPerSession": 1.0, "ga_avgTimeOnPage": 0.0, "ga_bounceRate": 100.0, "ga_exitRate": 100.0, "view_id": "211669975"}, "emitted_at": 1639563253285} +{"stream": "daily_active_users", "data": {"ga_date": "2021-12-10", "ga_1dayUsers": 1, "view_id": "211669975"}, "emitted_at": 1639563251092} +{"stream": "weekly_active_users", "data": {"ga_date": "2021-12-15", "ga_7dayUsers": 1, "view_id": "211669975"}, "emitted_at": 1639563249172} +{"stream": "locations", "data": {"ga_date": "2021-12-10", "ga_continent": "Americas", "ga_subContinent": "Northern America", "ga_country": "United States", "ga_region": "Washington", "ga_metro": "Seattle-Tacoma WA", "ga_city": "Seattle", "ga_users": 1, "ga_newUsers": 1, "ga_sessions": 1, "ga_sessionsPerUser": 1.0, "ga_avgSessionDuration": 0.0, "ga_pageviews": 1, "ga_pageviewsPerSession": 1.0, "ga_avgTimeOnPage": 0.0, "ga_bounceRate": 100.0, "ga_exitRate": 100.0, "view_id": "211669975"}, "emitted_at": 1639563230934} +{"stream": "pages", "data": {"ga_date": "2021-12-10", "ga_hostname": "www.surveymonkey.com", "ga_pagePath": "/apps/NKI5TOTqk4tS5BZyJXU9YQ_3D_3D/preview", "ga_pageviews": 1, "ga_uniquePageviews": 1, "ga_avgTimeOnPage": 0.0, "ga_entrances": 1, "ga_entranceRate": 100.0, "ga_bounceRate": 100.0, "ga_exits": 1, "ga_exitRate": 100.0, "view_id": "211669975"}, "emitted_at": 1639563227527} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/source.py b/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/source.py index 92f4c015aa36..535e551c5dd2 100644 --- 
a/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/source.py +++ b/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/source.py @@ -8,7 +8,7 @@ import time from abc import ABC from datetime import datetime -from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Tuple +from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Union import jwt import pendulum @@ -111,6 +111,10 @@ def to_datetime_str(date: datetime) -> str: """ return date.strftime("%Y-%m-%d") + @staticmethod + def to_iso_datetime_str(date: str) -> str: + return datetime.strptime(date, "%Y%m%d").strftime("%Y-%m-%d") + def path(self, **kwargs) -> str: # need add './' for correct urllib.parse.urljoin work due to path contains ':' return "./reports:batchGet" @@ -174,21 +178,25 @@ def get_json_schema(self) -> Mapping[str, Any]: # Add the dimensions to the schema for dimension in self.dimensions: data_type = self.lookup_data_type("dimension", dimension) + data_format = self.lookup_data_format(dimension) dimension = dimension.replace("ga:", "ga_") - schema["properties"][dimension] = { - "type": [data_type], - } + dimension_data = {"type": [data_type]} + if data_format: + dimension_data["format"] = data_format + schema["properties"][dimension] = dimension_data # Add the metrics to the schema for metric in self.metrics: data_type = self.lookup_data_type("metric", metric) + data_format = self.lookup_data_format(metric) metric = metric.replace("ga:", "ga_") - schema["properties"][metric] = { - # metrics are allowed to also have null values - "type": ["null", data_type], - } + # metrics are allowed to also have null values + metric_data = {"type": ["null", data_type]} + if data_format: + metric_data["format"] = data_format + schema["properties"][metric] = metric_data return schema @@ -270,6 +278,21 @@ def lookup_data_type(self, field_type, attribute): return data_type + @staticmethod + def lookup_data_format(attribute: str) -> Union[str, None]: + if attribute == "ga:date": + return "date" + return + + def convert_to_type(self, header, value, data_type): + if data_type == "integer": + return int(value) + if data_type == "number": + return float(value) + if header == "ga:date": + return self.to_iso_datetime_str(value) + return value + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: """ Default response: @@ -359,13 +382,7 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp for header, dimension in zip(dimension_headers, dimensions): data_type = self.lookup_data_type("dimension", header) - - if data_type == "integer": - value = int(dimension) - elif data_type == "number": - value = float(dimension) - else: - value = dimension + value = self.convert_to_type(header, dimension, data_type) record[header.replace("ga:", "ga_")] = value @@ -373,11 +390,7 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp for metric_header, value in zip(metric_headers, values.get("values")): metric_name = metric_header.get("name") metric_type = self.lookup_data_type("metric", metric_name) - - if metric_type == "integer": - value = int(value) - elif metric_type == "number": - value = float(value) + value = self.convert_to_type(metric_name, value, metric_type) record[metric_name.replace("ga:", "ga_")] = value diff --git a/airbyte-integrations/connectors/source-harvest/Dockerfile b/airbyte-integrations/connectors/source-harvest/Dockerfile 
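The conversion flow added above, reduced to a runnable sketch: ga:date values arrive from the API as YYYYMMDD strings and are normalized to ISO dates, while integer and number metrics are cast to native types. The sample values are made up.

from datetime import datetime


def to_iso_datetime_str(date: str) -> str:
    """Convert a YYYYMMDD string from the GA API into YYYY-MM-DD."""
    return datetime.strptime(date, "%Y%m%d").strftime("%Y-%m-%d")


def convert_to_type(header: str, value: str, data_type: str):
    if data_type == "integer":
        return int(value)
    if data_type == "number":
        return float(value)
    if header == "ga:date":
        return to_iso_datetime_str(value)
    return value


assert convert_to_type("ga:date", "20211210", "string") == "2021-12-10"
assert convert_to_type("ga:users", "1", "integer") == 1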
index fe4b51b5b442..515e9f1a4eb5 100644 --- a/airbyte-integrations/connectors/source-harvest/Dockerfile +++ b/airbyte-integrations/connectors/source-harvest/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.7 +LABEL io.airbyte.version=0.1.8 LABEL io.airbyte.name=airbyte/source-harvest diff --git a/airbyte-integrations/connectors/source-harvest/source_harvest/spec.json b/airbyte-integrations/connectors/source-harvest/source_harvest/spec.json index b6a3fe1c8ac0..3c8325220e7d 100644 --- a/airbyte-integrations/connectors/source-harvest/source_harvest/spec.json +++ b/airbyte-integrations/connectors/source-harvest/source_harvest/spec.json @@ -9,13 +9,13 @@ "properties": { "account_id": { "title": "Account ID", - "description": "Harvest account ID. Required for all Harvest requests in pair with API Key", + "description": "Harvest account ID. Required for all Harvest requests together with a Personal Access Token.", "airbyte_secret": true, "type": "string", "order": 0 }, "replication_start_date": { - "title": "Replication Start Date", + "title": "Start Date", "description": "UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.", "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", "examples": ["2017-01-25T00:00:00Z"], @@ -24,13 +24,13 @@ }, "credentials": { "title": "Authentication mechanism", - "description": "Choose how to authenticate to Harvest", + "description": "Choose how to authenticate to Harvest.", "type": "object", "order": 2, "oneOf": [ { "type": "object", - "title": "Authenticate via Harvest (Oauth)", + "title": "Authenticate via Harvest (OAuth)", "required": ["client_id", "client_secret", "refresh_token"], "additionalProperties": false, "properties": { @@ -44,18 +44,18 @@ "client_id": { "title": "Client ID", "type": "string", - "description": "The Client ID of your application" + "description": "The Client ID of your Harvest developer application."
}, "client_secret": { "title": "Client Secret", "type": "string", - "description": "The client secret of your application", + "description": "The Client Secret of your Harvest developer application.", "airbyte_secret": true }, "refresh_token": { "title": "Refresh Token", "type": "string", - "description": "A refresh token generated using the above client ID and secret", + "description": "Refresh Token to renew the expired Access Token.", "airbyte_secret": true } } diff --git a/airbyte-integrations/connectors/source-hubspot/Dockerfile b/airbyte-integrations/connectors/source-hubspot/Dockerfile index 587bf72b6e03..7c35101c19b4 100644 --- a/airbyte-integrations/connectors/source-hubspot/Dockerfile +++ b/airbyte-integrations/connectors/source-hubspot/Dockerfile @@ -34,5 +34,5 @@ COPY source_hubspot ./source_hubspot ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.26 +LABEL io.airbyte.version=0.1.29 LABEL io.airbyte.name=airbyte/source-hubspot diff --git a/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml b/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml index dac3581abffb..02faf9dc55f9 100644 --- a/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml @@ -26,14 +26,14 @@ tests: # and therefore the start date is set at 2021-10-10 for `config_oauth.json`, # but the campaign was created on 2021-01-11 empty_streams: ["campaigns", "workflows"] - # incremental: fixme (eugene): '<=' not supported between instances of 'int' and 'str' - # See https://github.com/airbytehq/airbyte/issues/6509 - # - config_path: "secrets/config.json" - # configured_catalog_path: "sample_files/configured_catalog.json" - # future_state_path: "integration_tests/abnormal_state.json" - # cursor_paths: - # subscription_changes: ["timestamp"] - # email_events: ["timestamp"] + incremental: + - config_path: "secrets/config.json" + configured_catalog_path: "sample_files/configured_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" + cursor_paths: + subscription_changes: ["timestamp"] + email_events: ["timestamp"] + contact_lists: ["timestamp"] full_refresh: - config_path: "secrets/config.json" configured_catalog_path: "sample_files/full_refresh_catalog.json" diff --git a/airbyte-integrations/connectors/source-hubspot/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-hubspot/integration_tests/abnormal_state.json index 1f6f5a46a9e6..5944b5d50c7a 100644 --- a/airbyte-integrations/connectors/source-hubspot/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-hubspot/integration_tests/abnormal_state.json @@ -1,8 +1,32 @@ { + "companies": { + "updatedAt": "2221-10-12T13:37:56.412000+00:00" + }, + "contact_lists": { + "timestamp": "2221-10-12T13:37:56.412000+00:00" + }, + "contacts": { + "updatedAt": "2221-10-12T13:37:56.412000+00:00" + }, + "deals": { + "updatedAt": "2221-10-12T13:37:56.412000+00:00" + }, "email_events": { - "timestamp": "2121-03-19T17:00:45.743000+00:00" + "timestamp": "2221-10-12T13:37:56.412000+00:00" + }, + "line_items": { + "updatedAt": "2221-10-12T13:37:56.412000+00:00" + }, + "products": { + "updatedAt": "2221-10-12T13:37:56.412000+00:00" + }, + "quotes": { + "updatedAt": "2221-10-12T13:37:56.412000+00:00" }, "subscription_changes": { - "timestamp": 
"2121-03-19T16:58:54.301000+00:00" + "timestamp": "2221-10-12T13:37:56.412000+00:00" + }, + "tickets": { + "updatedAt": "2221-10-12T13:37:56.412000+00:00" } } diff --git a/airbyte-integrations/connectors/source-hubspot/sample_files/configured_catalog.json b/airbyte-integrations/connectors/source-hubspot/sample_files/configured_catalog.json index 3eb0dc3836ca..5f9e42c80d30 100644 --- a/airbyte-integrations/connectors/source-hubspot/sample_files/configured_catalog.json +++ b/airbyte-integrations/connectors/source-hubspot/sample_files/configured_catalog.json @@ -13,28 +13,37 @@ "stream": { "name": "companies", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "sync_mode": "incremental", + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" }, { "stream": { "name": "contact_lists", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "sync_mode": "incremental", + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" }, { "stream": { "name": "contacts", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "sync_mode": "incremental", + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" }, { "stream": { @@ -49,10 +58,13 @@ "stream": { "name": "deals", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "sync_mode": "incremental", + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" }, { "stream": { @@ -88,10 +100,13 @@ "stream": { "name": "line_items", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "sync_mode": "incremental", + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" }, { "stream": { @@ -118,19 +133,25 @@ "stream": { "name": "products", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "sync_mode": "incremental", + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" }, { "stream": { "name": "quotes", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "sync_mode": "incremental", + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" }, { "stream": { @@ -148,10 +169,13 @@ "stream": { 
"name": "tickets", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updatedAt"] }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "sync_mode": "incremental", + "cursor_field": ["updatedAt"], + "destination_sync_mode": "append" }, { "stream": { diff --git a/airbyte-integrations/connectors/source-hubspot/sample_files/sample_state.json b/airbyte-integrations/connectors/source-hubspot/sample_files/sample_state.json index b56de1fc71ec..84dc55345bbf 100644 --- a/airbyte-integrations/connectors/source-hubspot/sample_files/sample_state.json +++ b/airbyte-integrations/connectors/source-hubspot/sample_files/sample_state.json @@ -1,8 +1,32 @@ { - "subscription_changes": { + "companies": { + "updatedAt": "2021-02-23T00:00:00Z" + }, + "contact_lists": { "timestamp": "2021-02-23T00:00:00Z" }, + "contacts": { + "updatedAt": "2021-02-23T00:00:00Z" + }, + "deals": { + "updatedAt": "2021-02-23T00:00:00Z" + }, "email_events": { "timestamp": "2021-02-23T00:00:00Z" + }, + "line_items": { + "updatedAt": "2021-02-23T00:00:00Z" + }, + "products": { + "updatedAt": "2021-02-23T00:00:00Z" + }, + "quotes": { + "updatedAt": "2021-02-23T00:00:00Z" + }, + "subscription_changes": { + "timestamp": "2021-02-23T00:00:00Z" + }, + "tickets": { + "updatedAt": "2021-02-23T00:00:00Z" } -} +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/api.py b/airbyte-integrations/connectors/source-hubspot/source_hubspot/api.py index e1ea36cbf4aa..01d353f4920b 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/api.py +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/api.py @@ -436,6 +436,9 @@ class IncrementalStream(Stream, ABC): state_pk = "timestamp" limit = 1000 + # Flag which enable/disable chunked read in read_chunked method + # False -> chunk size is max (only one slice), True -> chunk_size is 30 days + need_chunk = True @property @abstractmethod @@ -446,12 +449,15 @@ def updated_at_field(self): def state(self) -> Optional[Mapping[str, Any]]: """Current state, if wasn't set return None""" if self._state: - return {self.state_pk: str(self._state)} + return ( + {self.state_pk: int(self._state.timestamp() * 1000)} if self.state_pk == "timestamp" else {self.state_pk: str(self._state)} + ) return None @state.setter def state(self, value): - self._state = pendulum.parse(value[self.state_pk]) + state = value[self.state_pk] + self._state = pendulum.parse(str(pendulum.from_timestamp(state / 1000))) if isinstance(state, int) else pendulum.parse(state) self._start_date = max(self._state, self._start_date) def __init__(self, *args, **kwargs): @@ -477,12 +483,13 @@ def read(self, getter: Callable, params: Mapping[str, Any] = None) -> Iterator: self._start_date = self._state def read_chunked( - self, getter: Callable, params: Mapping[str, Any] = None, chunk_size: pendulum.duration = pendulum.duration(days=1) + self, getter: Callable, params: Mapping[str, Any] = None, chunk_size: pendulum.duration = pendulum.duration(days=30) ) -> Iterator: params = {**params} if params else {} now_ts = int(pendulum.now().timestamp() * 1000) start_ts = int(self._start_date.timestamp() * 1000) - chunk_size = int(chunk_size.total_seconds() * 1000) + max_delta = now_ts - start_ts + chunk_size = int(chunk_size.total_seconds() * 1000) if self.need_chunk else max_delta for ts in range(start_ts, now_ts, chunk_size): end_ts 
diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/api.py b/airbyte-integrations/connectors/source-hubspot/source_hubspot/api.py
index e1ea36cbf4aa..01d353f4920b 100644
--- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/api.py
+++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/api.py
@@ -436,6 +436,9 @@ class IncrementalStream(Stream, ABC):
     state_pk = "timestamp"
     limit = 1000
+    # Flag that enables/disables chunked reads in the read_chunked method.
+    # False -> chunk size is the maximum (only one slice); True -> chunk size is 30 days.
+    need_chunk = True
 
     @property
     @abstractmethod
@@ -446,12 +449,15 @@ def updated_at_field(self):
     def state(self) -> Optional[Mapping[str, Any]]:
         """Current state, if wasn't set return None"""
         if self._state:
-            return {self.state_pk: str(self._state)}
+            return (
+                {self.state_pk: int(self._state.timestamp() * 1000)} if self.state_pk == "timestamp" else {self.state_pk: str(self._state)}
+            )
         return None
 
     @state.setter
     def state(self, value):
-        self._state = pendulum.parse(value[self.state_pk])
+        state = value[self.state_pk]
+        self._state = pendulum.parse(str(pendulum.from_timestamp(state / 1000))) if isinstance(state, int) else pendulum.parse(state)
         self._start_date = max(self._state, self._start_date)
 
     def __init__(self, *args, **kwargs):
@@ -477,12 +483,13 @@ def read(self, getter: Callable, params: Mapping[str, Any] = None) -> Iterator:
             self._start_date = self._state
 
     def read_chunked(
-        self, getter: Callable, params: Mapping[str, Any] = None, chunk_size: pendulum.duration = pendulum.duration(days=1)
+        self, getter: Callable, params: Mapping[str, Any] = None, chunk_size: pendulum.duration = pendulum.duration(days=30)
     ) -> Iterator:
         params = {**params} if params else {}
         now_ts = int(pendulum.now().timestamp() * 1000)
         start_ts = int(self._start_date.timestamp() * 1000)
-        chunk_size = int(chunk_size.total_seconds() * 1000)
+        max_delta = now_ts - start_ts
+        chunk_size = int(chunk_size.total_seconds() * 1000) if self.need_chunk else max_delta
         for ts in range(start_ts, now_ts, chunk_size):
             end_ts = ts + chunk_size
@@ -553,6 +560,12 @@ def _flat_associations(self, records: Iterable[MutableMapping]) -> Iterable[Muta
         yield record
 
 
+class CRMObjectIncrementalStream(CRMObjectStream, IncrementalStream):
+    state_pk = "updatedAt"
+    limit = 100
+    need_chunk = False
+
+
 class CampaignStream(Stream):
     """Email campaigns, API v1
     There is some confusion between emails and campaigns in docs, this endpoint returns actual emails
@@ -571,7 +584,7 @@ def list(self, fields) -> Iterable:
             yield {**row, **record}
 
 
-class ContactListStream(Stream):
+class ContactListStream(IncrementalStream):
     """Contact lists, API v1
     Docs: https://legacydocs.hubspot.com/docs/methods/lists/get_lists
     """
@@ -582,6 +595,7 @@ class ContactListStream(Stream):
     updated_at_field = "updatedAt"
     created_at_field = "createdAt"
     limit_field = "count"
+    need_chunk = False
 
 
 class DealStageHistoryStream(Stream):
@@ -608,7 +622,7 @@ def list(self, fields) -> Iterable:
         yield from self.read(partial(self._api.get, url=self.url), params)
 
 
-class DealStream(CRMObjectStream):
+class DealStream(CRMObjectIncrementalStream):
     """Deals, API v3"""
 
     def __init__(self, **kwargs):
diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/client.py b/airbyte-integrations/connectors/source-hubspot/source_hubspot/client.py
index fd48c0816a25..b47bdaeaa4d1 100644
--- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/client.py
+++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/client.py
@@ -12,7 +12,7 @@
     API,
     CampaignStream,
     ContactListStream,
-    CRMObjectStream,
+    CRMObjectIncrementalStream,
     DealPipelineStream,
     DealStream,
     EmailEventStream,
@@ -35,26 +35,26 @@ def __init__(self, start_date, credentials, **kwargs):
         common_params = dict(api=self._api, start_date=self._start_date)
         self._apis = {
             "campaigns": CampaignStream(**common_params),
-            "companies": CRMObjectStream(entity="company", associations=["contacts"], **common_params),
+            "companies": CRMObjectIncrementalStream(entity="company", associations=["contacts"], **common_params),
             "contact_lists": ContactListStream(**common_params),
-            "contacts": CRMObjectStream(entity="contact", **common_params),
+            "contacts": CRMObjectIncrementalStream(entity="contact", **common_params),
             "deal_pipelines": DealPipelineStream(**common_params),
             "deals": DealStream(associations=["contacts"], **common_params),
             "email_events": EmailEventStream(**common_params),
             "engagements": EngagementStream(**common_params),
             "forms": FormStream(**common_params),
-            "line_items": CRMObjectStream(entity="line_item", **common_params),
+            "line_items": CRMObjectIncrementalStream(entity="line_item", **common_params),
             "marketing_emails": MarketingEmailStream(**common_params),
             "owners": OwnerStream(**common_params),
-            "products": CRMObjectStream(entity="product", **common_params),
+            "products": CRMObjectIncrementalStream(entity="product", **common_params),
             "subscription_changes": SubscriptionChangeStream(**common_params),
-            "tickets": CRMObjectStream(entity="ticket", **common_params),
+            "tickets": CRMObjectIncrementalStream(entity="ticket", **common_params),
             "workflows": WorkflowStream(**common_params),
         }
 
         credentials_title = credentials.get("credentials_title")
         if credentials_title == "API Key Credentials":
-            self._apis["quotes"] = CRMObjectStream(entity="quote", **common_params)
+            self._apis["quotes"] = CRMObjectIncrementalStream(entity="quote", **common_params)
 
         super().__init__(**kwargs)
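The reworked `state` property above serializes a timestamp cursor as epoch milliseconds on the way out, while the setter still accepts the legacy ISO-8601 strings. A minimal sketch of that round-trip, assuming pendulum is available; the helper names are illustrative, not part of the patch:

    import pendulum

    def state_to_message(state: pendulum.DateTime) -> dict:
        # Emit the cursor as epoch milliseconds, as the new getter does.
        return {"timestamp": int(state.timestamp() * 1000)}

    def message_to_state(value) -> pendulum.DateTime:
        # Accept both the new integer format and the legacy ISO-8601 string.
        if isinstance(value, int):
            return pendulum.from_timestamp(value / 1000)
        return pendulum.parse(value)

    saved = pendulum.datetime(2021, 2, 23, tz="UTC")
    message = state_to_message(saved)                         # {'timestamp': 1614038400000}
    assert message_to_state(message["timestamp"]) == saved    # integer round-trip
    assert message_to_state("2021-02-23T00:00:00Z") == saved  # legacy string still accepted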
diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/deals.json b/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/deals.json
index f6564e9bb731..c8545f9459b3 100644
--- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/deals.json
+++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/deals.json
@@ -310,10 +310,12 @@
       }
     },
     "createdAt": {
-      "type": ["null", "string"]
+      "type": ["null", "string"],
+      "format": "date-time"
     },
     "updatedAt": {
-      "type": ["null", "string"]
+      "type": ["null", "string"],
+      "format": "date-time"
     },
     "archived": {
       "type": ["null", "boolean"]
diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/spec.json b/airbyte-integrations/connectors/source-hubspot/source_hubspot/spec.json
index ef545b0b42af..02a5e2b8f993 100644
--- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/spec.json
+++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/spec.json
@@ -5,23 +5,23 @@
   "title": "HubSpot Source Spec",
   "type": "object",
   "required": ["start_date", "credentials"],
-  "additionalProperties": false,
+  "additionalProperties": true,
   "properties": {
     "start_date": {
       "type": "string",
-      "title": "Replication start date",
+      "title": "Start Date",
       "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$",
       "description": "UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.",
       "examples": ["2017-01-25T00:00:00Z"]
     },
     "credentials": {
       "title": "Authentication mechanism",
-      "description": "Choose either to provide the API key or the OAuth2.0 credentials",
+      "description": "Choose how to authenticate to HubSpot.",
       "type": "object",
       "oneOf": [
         {
           "type": "object",
-          "title": "Authenticate via HubSpot (Oauth)",
+          "title": "Authenticate via HubSpot (OAuth)",
           "required": [
             "client_id",
             "client_secret",
@@ -31,7 +31,7 @@
           "properties": {
             "credentials_title": {
               "type": "string",
-              "title": "Credentials title",
+              "title": "Credentials Title",
               "description": "Name of the credentials set",
               "const": "OAuth Credentials",
               "enum": ["OAuth Credentials"],
@@ -40,20 +40,20 @@
             },
             "client_id": {
               "title": "Client ID",
-              "description": "HubSpot client_id. See our docs if you need help finding this id.",
+              "description": "The Client ID of your HubSpot developer application. See our docs if you need help finding this id.",
               "type": "string",
               "examples": ["123456789000"]
             },
             "client_secret": {
               "title": "Client Secret",
-              "description": "HubSpot client_secret. See our docs if you need help finding this secret.",
+              "description": "The Client Secret of your HubSpot developer application. See our docs if you need help finding this secret.",
               "type": "string",
               "examples": ["secret"],
               "airbyte_secret": true
             },
             "refresh_token": {
-              "title": "Refresh token",
-              "description": "HubSpot refresh_token. See our docs if you need help generating the token.",
+              "title": "Refresh Token",
+              "description": "Refresh Token to renew the expired Access Token.
See our docs if you need help generating the token.", "type": "string", "examples": ["refresh_token"], "airbyte_secret": true diff --git a/airbyte-integrations/connectors/source-intercom/Dockerfile b/airbyte-integrations/connectors/source-intercom/Dockerfile index 8e86bd12ee54..7cea41697a66 100644 --- a/airbyte-integrations/connectors/source-intercom/Dockerfile +++ b/airbyte-integrations/connectors/source-intercom/Dockerfile @@ -35,5 +35,5 @@ COPY source_intercom ./source_intercom ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.11 +LABEL io.airbyte.version=0.1.12 LABEL io.airbyte.name=airbyte/source-intercom diff --git a/airbyte-integrations/connectors/source-intercom/source_intercom/spec.json b/airbyte-integrations/connectors/source-intercom/source_intercom/spec.json index ad04c0891be4..c20371c59d2f 100644 --- a/airbyte-integrations/connectors/source-intercom/source_intercom/spec.json +++ b/airbyte-integrations/connectors/source-intercom/source_intercom/spec.json @@ -9,14 +9,15 @@ "properties": { "start_date": { "type": "string", - "description": "The date from which you'd like to replicate data for Intercom API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.", + "title": "Start Date", + "description": "UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.", "examples": ["2020-11-16T00:00:00Z"], "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" }, "access_token": { "title": "Access Token", "type": "string", - "description": "Access token generated either from an oauth flow or from the Intercom Developer dashboard. See the docs for more information on how to obtain this key manually.", + "description": "Access Token for making authenticated requests. 
See the docs for more information on how to obtain this key manually.", "airbyte_secret": true } } diff --git a/airbyte-integrations/connectors/source-jdbc/Dockerfile b/airbyte-integrations/connectors/source-jdbc/Dockerfile index 5e5967e3973b..5bb0bb271375 100644 --- a/airbyte-integrations/connectors/source-jdbc/Dockerfile +++ b/airbyte-integrations/connectors/source-jdbc/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-jdbc -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.3.1 LABEL io.airbyte.name=airbyte/source-jdbc diff --git a/airbyte-integrations/connectors/source-jdbc/build.gradle b/airbyte-integrations/connectors/source-jdbc/build.gradle index da8d05827fd8..44813fddb4de 100644 --- a/airbyte-integrations/connectors/source-jdbc/build.gradle +++ b/airbyte-integrations/connectors/source-jdbc/build.gradle @@ -50,4 +50,4 @@ dependencies { testFixturesImplementation group: 'org.mockito', name: 'mockito-junit-jupiter', version: '4.0.0' implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-kafka/Dockerfile b/airbyte-integrations/connectors/source-kafka/Dockerfile index 7fd895d7bf0c..0aaabaadfe90 100644 --- a/airbyte-integrations/connectors/source-kafka/Dockerfile +++ b/airbyte-integrations/connectors/source-kafka/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-kafka -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.name=airbyte/source-kafka LABEL io.airbyte.version=0.1.1 diff --git a/airbyte-integrations/connectors/source-linnworks/Dockerfile b/airbyte-integrations/connectors/source-linnworks/Dockerfile index 48fb7c3246db..af8d5dd41494 100644 --- a/airbyte-integrations/connectors/source-linnworks/Dockerfile +++ b/airbyte-integrations/connectors/source-linnworks/Dockerfile @@ -34,5 +34,5 @@ COPY source_linnworks ./source_linnworks ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.3 +LABEL io.airbyte.version=0.1.4 LABEL io.airbyte.name=airbyte/source-linnworks diff --git a/airbyte-integrations/connectors/source-linnworks/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-linnworks/integration_tests/abnormal_state.json index 753bb769cc9b..fc503ae2fd7f 100644 --- a/airbyte-integrations/connectors/source-linnworks/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-linnworks/integration_tests/abnormal_state.json @@ -1,5 +1,8 @@ { "processed_orders": { - "dReceivedDate": "2050-01-01T00:00:00+00:00" + "dProcessedOn": "2050-01-01T00:00:00+00:00" + }, + "processed_order_details": { + "ProcessedDateTime": "2050-01-01T00:00:00+00:00" } } diff --git a/airbyte-integrations/connectors/source-linnworks/integration_tests/catalog.json b/airbyte-integrations/connectors/source-linnworks/integration_tests/catalog.json index 0014dca6ca3a..2b51b72441c1 100644 --- a/airbyte-integrations/connectors/source-linnworks/integration_tests/catalog.json +++ b/airbyte-integrations/connectors/source-linnworks/integration_tests/catalog.json @@ -28,7 +28,7 @@ "description": "Location tag" }, "BinRack": { - "type": 
"string", + "type": ["null", "string"], "description": "Bin rack" }, "IsWarehouseManaged": { @@ -225,7 +225,7 @@ "description": "Location tag" }, "BinRack": { - "type": "string", + "type": ["null", "string"], "description": "Bin rack" }, "IsWarehouseManaged": { @@ -271,7 +271,7 @@ "description": "if( Quantity == 0 ) dbo.StockItem.PurchasePrice Else CurrentStockValue / Quantity" }, "SKU": { - "type": "string", + "type": ["null", "string"], "description": "Product SKU" }, "AutoAdjust": { @@ -495,7 +495,7 @@ "description": "Url to full size image" }, "CheckSumValue": { - "type": "string", + "type": ["null", "string"], "description": "Image check sum" }, "pkRowId": { @@ -511,11 +511,11 @@ "description": "Sort order for the image" }, "ChecksumValue": { - "type": "string", + "type": ["null", "string"], "description": "Internal checksum value" }, "RawChecksum": { - "type": "string", + "type": ["null", "string"], "description": "Raw file checksum (Used for UI to determine if the image file is the same before submitting for upload)" }, "StockItemId": { @@ -582,7 +582,7 @@ "description": "Default package group id" }, "PackageGroupName": { - "type": "string", + "type": ["null", "string"], "description": "Default package group name" }, "Height": { @@ -643,7 +643,7 @@ "description": "Order ID" }, "cShippingAddress": { - "type": "string", + "type": ["null", "string"], "description": "Customer's shipping address" }, "dReceivedDate": { @@ -725,11 +725,11 @@ "description": "Postal service code" }, "Vendor": { - "type": "string", + "type": ["null", "string"], "description": "Courier name (e.g. DPD)" }, "BillingEmailAddress": { - "type": "string" + "type": ["null", "string"] }, "ReferenceNum": { "type": "string", @@ -806,11 +806,11 @@ "description": "When order was cancelled" }, "PackageCategory": { - "type": "string", + "type": ["null", "string"], "description": "Package category" }, "PackageTitle": { - "type": "string", + "type": ["null", "string"], "description": "Package name" }, "ItemWeight": { @@ -822,51 +822,51 @@ "description": "Total order weight" }, "FolderCollection": { - "type": "string", + "type": ["null", "string"], "description": "Folder name of an order" }, "cBillingAddress": { - "type": "string", + "type": ["null", "string"], "description": "Customer billing address" }, "BillingName": { - "type": "string", + "type": ["null", "string"], "description": "Customer billing name" }, "BillingCompany": { - "type": "string", + "type": ["null", "string"], "description": "Customer billing company" }, "BillingAddress1": { - "type": "string", + "type": ["null", "string"], "description": "Billing address line one" }, "BillingAddress2": { - "type": "string", + "type": ["null", "string"], "description": "Billing address line two" }, "BillingAddress3": { - "type": "string", + "type": ["null", "string"], "description": "Billing address line three" }, "BillingTown": { - "type": "string", + "type": ["null", "string"], "description": "Billing town" }, "BillingRegion": { - "type": "string", + "type": ["null", "string"], "description": "Billing region, area, county" }, "BillingPostCode": { - "type": "string", + "type": ["null", "string"], "description": "Billing postcode" }, "BillingCountryName": { - "type": "string", + "type": ["null", "string"], "description": "Billing country" }, "BillingPhoneNumber": { - "type": "string", + "type": ["null", "string"], "description": "Billing phone number" }, "HoldOrCancel": { @@ -893,8 +893,879 @@ }, "supported_sync_modes": ["full_refresh", "incremental"], 
"source_defined_cursor": true, - "default_cursor_field": ["dReceivedDate"], + "default_cursor_field": ["dProcessedOn"], "source_defined_primary_key": [["nOrderId"]] + }, + { + "name": "processed_order_details", + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "OrderId": { + "type": "string", + "description": "Order ID (pkOrderId)" + }, + "NumOrderId": { + "type": "integer", + "description": "Linnworks order number" + }, + "Processed": { + "type": "boolean", + "description": "If order is processed" + }, + "ProcessedDateTime": { + "type": ["null", "string"], + "format": "date-time", + "description": "Date and time when order was processed" + }, + "FulfilmentLocationId": { + "type": "string", + "description": "Location ID" + }, + "GeneralInfo": { + "type": "object", + "description": "General information about order", + "additionalProperties": false, + "properties": { + "Status": { + "type": "integer", + "description": "Order Status (0 = UNPAID, 1 = PAID, 2 = RETURN, 3 = PENDING, 4 = RESEND)" + }, + "LabelPrinted": { + "type": "boolean", + "description": "Is label printed" + }, + "LabelError": { + "type": "string", + "description": "Is there a label error" + }, + "InvoicePrinted": { + "type": "boolean", + "description": "Is invoice printed" + }, + "PickListPrinted": { + "type": "boolean", + "description": "Is pick list printed" + }, + "IsRuleRun": { + "type": "boolean", + "description": "If rules engine rule ran on an order" + }, + "Notes": { + "type": "integer", + "description": "Quantity of order notes" + }, + "PartShipped": { + "type": "boolean", + "description": "If order partly shipped" + }, + "Marker": { + "type": ["null", "integer"], + "description": "Order marker (0 = NOT TAG, 1 = Tag 1, 2 = Tag 2, 3 = Tag 3, 4 = Tag 4, 5 = Tag 5, 6 = Tag 6, 7 = Parked)" + }, + "IsParked": { + "type": "boolean", + "description": "Is the order parked?" + }, + "Identifiers": { + "type": "array", + "description": "Order identifiers. [Prime | Scheduled]", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "IdentifierId": { + "type": "integer", + "description": "Internal identifier id. Use to update image and name." + }, + "IsCustom": { + "type": "boolean", + "description": "Is the tag user or system defined?" + }, + "ImageId": { + "type": "string" + }, + "ImageUrl": { + "type": "string" + }, + "Tag": { + "type": "string", + "description": "Internal tag for identification purposes" + }, + "Name": { + "type": "string", + "description": "Name displayed where the tag is used" + } + } + } + }, + "ReferenceNum": { + "type": "string", + "description": "Order reference number (Channel defined)" + }, + "SecondaryReference": { + "type": "string", + "description": "An additional reference number for the orderr (Used by some channels)" + }, + "ExternalReferenceNum": { + "type": "string", + "description": "This is an additional reference number from the sales channel, typically used by eBay" + }, + "ReceivedDate": { + "type": "string", + "format": "date-time", + "description": "The date and time at which the order was placed on the sales channel" + }, + "Source": { + "type": "string", + "description": "Order ChannelName/Source (e.g. EBAY)" + }, + "SubSource": { + "type": "string", + "description": "Order Subsource (e.g. EBAY1)" + }, + "SiteCode": { + "type": "string", + "description": "SiteCode used to differentiate between different sites from a single channel (eg. 
+              },
+              "HoldOrCancel": {
+                "type": "boolean",
+                "description": "This shows whether the order has been marked as on hold, for processed orders if the order has been cancelled OnHold = 1"
+              },
+              "DespatchByDate": {
+                "type": "string",
+                "format": "date-time",
+                "description": "Despatch by Date"
+              },
+              "ScheduledDelivery": {
+                "type": "object",
+                "description": "Scheduled delivery dates. Takes priority over despatch by date",
+                "additionalProperties": false,
+                "properties": {
+                  "From": {
+                    "type": "string",
+                    "format": "date-time"
+                  },
+                  "To": {
+                    "type": "string",
+                    "format": "date-time"
+                  }
+                }
+              },
+              "HasScheduledDelivery": {
+                "type": "boolean"
+              },
+              "Location": {
+                "type": "string",
+                "description": "Order location ID"
+              },
+              "NumItems": {
+                "type": "integer",
+                "description": "Quantity of order items"
+              },
+              "PickwaveIds": {
+                "type": "array",
+                "description": "All related Pickwave Ids",
+                "items": {
+                  "type": "integer"
+                }
+              },
+              "StockAllocationType": {
+                "type": ["null", "string"]
+              }
+            }
+          },
+          "ShippingInfo": {
+            "type": "object",
+            "description": "Order shipping information",
+            "additionalProperties": false,
+            "properties": {
+              "Vendor": {
+                "type": "string",
+                "description": "Courier name (e.g. Royal Mail)"
+              },
+              "PostalServiceId": {
+                "type": "string",
+                "description": "Postal service ID"
+              },
+              "PostalServiceName": {
+                "type": "string",
+                "description": "Postal service name (e.g. Next day delivery)"
+              },
+              "TotalWeight": {
+                "type": "number",
+                "description": "Order total weight"
+              },
+              "ItemWeight": {
+                "type": "number",
+                "description": "Item weight"
+              },
+              "PackageCategoryId": {
+                "type": "string",
+                "description": "Package category ID"
+              },
+              "PackageCategory": {
+                "type": "string",
+                "description": "Package category name"
+              },
+              "PackageTypeId": {
+                "type": ["null", "string"],
+                "description": "Package type ID"
+              },
+              "PackageType": {
+                "type": "string",
+                "description": "Package type name"
+              },
+              "PostageCost": {
+                "type": "number",
+                "description": "Order postage cost"
+              },
+              "PostageCostExTax": {
+                "type": "number",
+                "description": "Order postage cost excluding tax"
+              },
+              "TrackingNumber": {
+                "type": "string",
+                "description": "Order tracking number provided by courier"
+              },
+              "ManualAdjust": {
+                "type": "boolean",
+                "description": "If an adjustment to the shipping cost was made"
+              }
+            }
+          },
+          "CustomerInfo": {
+            "type": "object",
+            "description": "Order Customer information (Name, email etc)",
+            "additionalProperties": false,
+            "properties": {
+              "ChannelBuyerName": {
+                "type": "string",
+                "description": "Username of customer (Comes from channel)"
+              },
+              "Address": {
+                "type": "object",
+                "description": "Customer address",
+                "additionalProperties": false,
+                "properties": {
+                  "EmailAddress": {
+                    "type": "string",
+                    "description": "Customer's email address."
+                  },
+                  "Address1": {
+                    "type": "string",
+                    "description": "First line of customer address."
+                  },
+                  "Address2": {
+                    "type": "string",
+                    "description": "Second line of customer address."
+                  },
+                  "Address3": {
+                    "type": "string",
+                    "description": "Third line of customer address."
+                  },
+                  "Town": {
+                    "type": "string",
+                    "description": "Customer's town."
+                  },
+                  "Region": {
+                    "type": "string",
+                    "description": "Customer's region."
+                  },
+                  "PostCode": {
+                    "type": "string",
+                    "description": "Customer's postcode."
+                  },
+                  "Country": {
+                    "type": "string",
+                    "description": "Customer's country."
+ }, + "Continent": { + "type": "string", + "description": "Customer's continent" + }, + "FullName": { + "type": "string", + "description": "Customer's first and second name." + }, + "Company": { + "type": "string", + "description": "Customer's company name." + }, + "PhoneNumber": { + "type": "string", + "description": "Customer's telephone number." + }, + "CountryId": { + "type": "string" + } + } + }, + "BillingAddress": { + "type": "object", + "description": "Customer billing address", + "additionalProperties": false, + "properties": { + "EmailAddress": { + "type": "string", + "description": "Customer's email address." + }, + "Address1": { + "type": "string", + "description": "First line of customer address." + }, + "Address2": { + "type": "string", + "description": "Second line of customer address." + }, + "Address3": { + "type": "string", + "description": "Third line of customer address." + }, + "Town": { + "type": "string", + "description": "Customer's town." + }, + "Region": { + "type": "string", + "description": "Customer's region." + }, + "PostCode": { + "type": "string", + "description": "Customer's postcode." + }, + "Country": { + "type": "string", + "description": "Customer's country." + }, + "Continent": { + "type": "string", + "description": "Customer's continent" + }, + "FullName": { + "type": "string", + "description": "Customer's first and second name." + }, + "Company": { + "type": "string", + "description": "Customer's company name." + }, + "PhoneNumber": { + "type": "string", + "description": "Customer's telephone number." + }, + "CountryId": { + "type": "string" + } + } + } + } + }, + "TotalsInfo": { + "type": "object", + "description": "Order totals information", + "additionalProperties": false, + "properties": { + "pkOrderId": { + "type": "string", + "description": "Order Id" + }, + "Subtotal": { + "type": "number", + "description": "Order subtotal" + }, + "PostageCost": { + "type": "number", + "description": "Order postage cost" + }, + "PostageCostExTax": { + "type": "number", + "description": "Order postage cost ex. tax" + }, + "Tax": { + "type": "number", + "description": "Tax" + }, + "TotalCharge": { + "type": "number", + "description": "Total charge" + }, + "PaymentMethod": { + "type": "string", + "description": "Payment method" + }, + "PaymentMethodId": { + "type": "string", + "description": "Payment method ID" + }, + "ProfitMargin": { + "type": "number", + "description": "Profit margin" + }, + "TotalDiscount": { + "type": "number", + "description": "Total discount applied to the order" + }, + "Currency": { + "type": "string", + "description": "Order currency" + }, + "CountryTaxRate": { + "type": "number", + "description": "Country tax rate" + }, + "ConversionRate": { + "type": "number", + "description": "Currency conversion rate. 
Set at point of save by the currency" + } + } + }, + "ExtendedProperties": { + "type": "array", + "description": "Extended properties of an order", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "RowId": { + "type": "string", + "description": "Record row ID" + }, + "Name": { + "type": "string", + "description": "Extended property name" + }, + "Value": { + "type": "string", + "description": "Extended property value" + }, + "Type": { + "type": "string", + "description": "Extended property type" + } + } + } + }, + "FolderName": { + "type": "array", + "description": "Folder names assigned to an order", + "items": { + "type": "string" + } + }, + "Items": { + "type": "array", + "description": "List of order items", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "ItemId": { + "type": "string", + "description": "Stock Item ID" + }, + "ItemNumber": { + "type": "string", + "description": "Item number as on channel" + }, + "SKU": { + "type": "string", + "description": "Product SKU" + }, + "ItemSource": { + "type": "string", + "description": "Item source / channel name" + }, + "Title": { + "type": "string", + "description": "Item title" + }, + "Quantity": { + "type": "integer", + "description": "Quantity" + }, + "CategoryId": { + "type": "string" + }, + "CategoryName": { + "type": "string", + "description": "Product category" + }, + "CompositeAvailablity": { + "type": ["null", "integer"], + "description": "Composite availability" + }, + "StockLevelsSpecified": { + "type": "boolean", + "description": "If stock level specified" + }, + "OnOrder": { + "type": "integer", + "description": "Level due in purchase orders" + }, + "OnPurchaseOrder": { + "type": "object", + "description": "Purchase order bound to this item", + "additionalProperties": false, + "properties": { + "pkPurchaseItemId": { + "type": "string", + "description": "Primary key of the bound" + }, + "Rowid": { + "type": "string" + }, + "pkPurchaseId": { + "type": "string" + }, + "ExternalInvoiceNumber": { + "type": "string" + }, + "fkSupplierId": { + "type": "string" + }, + "DateOfDelivery": { + "type": "string", + "format": "date-time" + }, + "QuotedDeliveryDate": { + "type": "string", + "format": "date-time" + }, + "SupplierName": { + "type": "string" + }, + "fkLocationId": { + "type": "string" + } + } + }, + "InOrderBook": { + "type": ["null", "integer"], + "description": "Quantity currently in open orders" + }, + "Level": { + "type": "integer", + "description": "Current stock level" + }, + "MinimumLevel": { + "type": ["null", "integer"], + "description": "Minimum level" + }, + "AvailableStock": { + "type": "integer", + "description": "Currently available stock level (Level-InOrderBook)" + }, + "PricePerUnit": { + "type": "number", + "description": "Unit price" + }, + "UnitCost": { + "type": "number", + "description": "Unit cost" + }, + "DespatchStockUnitCost": { + "type": "number", + "description": "Despatch stock unit cost" + }, + "Discount": { + "type": "number", + "description": "Percentage (0%, 10%, 20%, etc...)" + }, + "Tax": { + "type": "number", + "description": "Actual tax value on an item" + }, + "TaxRate": { + "type": "number", + "description": "Tax rate" + }, + "Cost": { + "type": "number", + "description": "Total item cost (exc tax)" + }, + "CostIncTax": { + "type": "number", + "description": "Total item cost (inc tax)" + }, + "CompositeSubItems": { + "$comment": "It should be \"$ref\": \"#/properties/Items\" but Airbyte doesn't support recursive 
$refs.", + "type": "array", + "items": { + "type": "object" + } + }, + "IsService": { + "type": "boolean", + "description": "if item is a service" + }, + "SalesTax": { + "type": "number", + "description": "Sales Tax" + }, + "TaxCostInclusive": { + "type": "boolean", + "description": "If tax is included in a cost" + }, + "PartShipped": { + "type": "boolean", + "description": "If order is partly shipped" + }, + "Weight": { + "type": "number", + "description": "Order weight" + }, + "BarcodeNumber": { + "type": "string", + "description": "Product barcode" + }, + "Market": { + "type": "integer", + "description": "Market" + }, + "ChannelSKU": { + "type": "string", + "description": "Channel product SKU" + }, + "ChannelTitle": { + "type": "string", + "description": "Channel product title" + }, + "DiscountValue": { + "type": "number" + }, + "HasImage": { + "type": "boolean", + "description": "If item got an image" + }, + "ImageId": { + "type": ["null", "string"], + "description": "Image ID" + }, + "AdditionalInfo": { + "type": "array", + "description": "List of order item options", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "pkOptionId": { + "type": "string", + "description": "Option ID" + }, + "Property": { + "type": "string", + "description": "Option property" + }, + "Value": { + "type": "string", + "description": "Value of the option" + } + } + } + }, + "StockLevelIndicator": { + "type": "integer", + "description": "Stock level indicator" + }, + "ShippingCost": { + "type": "number", + "description": "If batch number scan required" + }, + "PartShippedQty": { + "type": "integer", + "description": "ShippingCost" + }, + "ItemName": { + "type": "string", + "description": "PartShippedQty" + }, + "BatchNumberScanRequired": { + "type": "boolean", + "description": "ItemName" + }, + "SerialNumberScanRequired": { + "type": "boolean", + "description": "If serial number scan required" + }, + "BinRack": { + "type": "string", + "description": "Binrack location" + }, + "BinRacks": { + "type": "array", + "description": "List of BinRacks used for OrderItem", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "Quantity": { + "type": "integer", + "description": "Quantity for BinRack per Location" + }, + "BinRack": { + "type": "string", + "description": "BinRack" + }, + "Location": { + "type": "string", + "description": "LocationId of the BinRack" + }, + "BatchId": { + "type": ["null", "integer"], + "description": "If the item is batched, identifies the batch number" + }, + "OrderItemBatchId": { + "type": ["null", "integer"], + "description": "If the item is batched, identifies the unique order item batch row" + } + } + } + }, + "InventoryTrackingType": { + "type": "integer", + "description": "Identifies whether the item has a sell by date or other defined order in which inventory is to be sold" + }, + "isBatchedStockItem": { + "type": "boolean", + "description": "If item has batches" + }, + "IsWarehouseManaged": { + "type": "boolean" + }, + "IsUnlinked": { + "type": "boolean" + }, + "ParentItemId": { + "type": "string" + }, + "StockItemIntId": { + "type": "integer" + }, + "Boxes": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "BoxId": { + "type": "integer", + "description": "Unique box id." 
+ }, + "StockItemIntId": { + "type": "integer" + }, + "BoxName": { + "type": "string", + "description": "Box name max 16 characters" + }, + "Width": { + "type": "number", + "description": "Width of the box" + }, + "Height": { + "type": "number", + "description": "Height of the box" + }, + "Length": { + "type": "number", + "description": "Depth of the box" + }, + "Weight": { + "type": "number", + "description": "Total weight of the box." + }, + "ValuePercentage": { + "type": "number", + "description": "Value break down percentage" + }, + "Barcode": { + "type": "string", + "description": "Box barcode, max 64 characters." + }, + "PackagingTypeId": { + "type": "string", + "description": "Packaging type id" + }, + "LogicalDelete": { + "type": "boolean", + "description": "IsDeleted flag." + } + } + } + }, + "RowId": { + "type": "string", + "description": "Record row ID" + }, + "OrderId": { + "type": "string", + "description": "Order ID (pkOrderID)" + }, + "StockItemId": { + "type": "string", + "description": "Stock Item ID" + }, + "StockId": { + "type": "string" + } + } + } + }, + "Notes": { + "type": "array", + "description": "List of order notes", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "OrderNoteId": { + "type": "string", + "description": "Order note ID" + }, + "OrderId": { + "type": "string", + "description": "Order Id" + }, + "NoteDate": { + "type": "string", + "format": "date-time", + "description": "Date and time when note was added" + }, + "Internal": { + "type": "boolean", + "description": "order note type (Internal or External)" + }, + "Note": { + "type": "string", + "description": "Note's text" + }, + "CreatedBy": { + "type": "string", + "description": "User that created note" + }, + "NoteTypeId": { + "type": ["null", "string"] + } + } + } + }, + "PaidDateTime": { + "type": ["null", "string"], + "format": "date-time", + "description": "Date and time when the order was marked as paid" + }, + "TaxId": { + "type": "string", + "description": "Buyer's tax number." 
+ } + } + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["ProcessedDateTime"], + "source_defined_primary_key": [["NumOrderId"]] } ] } diff --git a/airbyte-integrations/connectors/source-linnworks/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-linnworks/integration_tests/configured_catalog.json index d161cde9de95..42b4a9d179e0 100644 --- a/airbyte-integrations/connectors/source-linnworks/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-linnworks/integration_tests/configured_catalog.json @@ -29,7 +29,7 @@ "description": "Location tag" }, "BinRack": { - "type": "string", + "type": ["null", "string"], "description": "Bin rack" }, "IsWarehouseManaged": { @@ -234,7 +234,7 @@ "description": "Location tag" }, "BinRack": { - "type": "string", + "type": ["null", "string"], "description": "Bin rack" }, "IsWarehouseManaged": { @@ -280,7 +280,7 @@ "description": "if( Quantity == 0 ) dbo.StockItem.PurchasePrice Else CurrentStockValue / Quantity" }, "SKU": { - "type": "string", + "type": ["null", "string"], "description": "Product SKU" }, "AutoAdjust": { @@ -504,7 +504,7 @@ "description": "Url to full size image" }, "CheckSumValue": { - "type": "string", + "type": ["null", "string"], "description": "Image check sum" }, "pkRowId": { @@ -520,11 +520,11 @@ "description": "Sort order for the image" }, "ChecksumValue": { - "type": "string", + "type": ["null", "string"], "description": "Internal checksum value" }, "RawChecksum": { - "type": "string", + "type": ["null", "string"], "description": "Raw file checksum (Used for UI to determine if the image file is the same before submitting for upload)" }, "StockItemId": { @@ -591,7 +591,7 @@ "description": "Default package group id" }, "PackageGroupName": { - "type": "string", + "type": ["null", "string"], "description": "Default package group name" }, "Height": { @@ -656,7 +656,7 @@ "description": "Order ID" }, "cShippingAddress": { - "type": "string", + "type": ["null", "string"], "description": "Customer's shipping address" }, "dReceivedDate": { @@ -738,11 +738,11 @@ "description": "Postal service code" }, "Vendor": { - "type": "string", + "type": ["null", "string"], "description": "Courier name (e.g. 
DPD)" }, "BillingEmailAddress": { - "type": "string" + "type": ["null", "string"] }, "ReferenceNum": { "type": "string", @@ -819,11 +819,11 @@ "description": "When order was cancelled" }, "PackageCategory": { - "type": "string", + "type": ["null", "string"], "description": "Package category" }, "PackageTitle": { - "type": "string", + "type": ["null", "string"], "description": "Package name" }, "ItemWeight": { @@ -835,51 +835,51 @@ "description": "Total order weight" }, "FolderCollection": { - "type": "string", + "type": ["null", "string"], "description": "Folder name of an order" }, "cBillingAddress": { - "type": "string", + "type": ["null", "string"], "description": "Customer billing address" }, "BillingName": { - "type": "string", + "type": ["null", "string"], "description": "Customer billing name" }, "BillingCompany": { - "type": "string", + "type": ["null", "string"], "description": "Customer billing company" }, "BillingAddress1": { - "type": "string", + "type": ["null", "string"], "description": "Billing address line one" }, "BillingAddress2": { - "type": "string", + "type": ["null", "string"], "description": "Billing address line two" }, "BillingAddress3": { - "type": "string", + "type": ["null", "string"], "description": "Billing address line three" }, "BillingTown": { - "type": "string", + "type": ["null", "string"], "description": "Billing town" }, "BillingRegion": { - "type": "string", + "type": ["null", "string"], "description": "Billing region, area, county" }, "BillingPostCode": { - "type": "string", + "type": ["null", "string"], "description": "Billing postcode" }, "BillingCountryName": { - "type": "string", + "type": ["null", "string"], "description": "Billing country" }, "BillingPhoneNumber": { - "type": "string", + "type": ["null", "string"], "description": "Billing phone number" }, "HoldOrCancel": { @@ -906,11 +906,886 @@ }, "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": ["dReceivedDate"], + "default_cursor_field": ["dProcessedOn"], "source_defined_primary_key": [["nOrderId"]] }, "sync_mode": "incremental", "destination_sync_mode": "append_dedup" + }, + { + "stream": { + "name": "processed_order_details", + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "OrderId": { + "type": "string", + "description": "Order ID (pkOrderId)" + }, + "NumOrderId": { + "type": "integer", + "description": "Linnworks order number" + }, + "Processed": { + "type": "boolean", + "description": "If order is processed" + }, + "ProcessedDateTime": { + "type": ["null", "string"], + "format": "date-time", + "description": "Date and time when order was processed" + }, + "FulfilmentLocationId": { + "type": "string", + "description": "Location ID" + }, + "GeneralInfo": { + "type": "object", + "description": "General information about order", + "additionalProperties": false, + "properties": { + "Status": { + "type": "integer", + "description": "Order Status (0 = UNPAID, 1 = PAID, 2 = RETURN, 3 = PENDING, 4 = RESEND)" + }, + "LabelPrinted": { + "type": "boolean", + "description": "Is label printed" + }, + "LabelError": { + "type": "string", + "description": "Is there a label error" + }, + "InvoicePrinted": { + "type": "boolean", + "description": "Is invoice printed" + }, + "PickListPrinted": { + "type": "boolean", + "description": "Is pick list printed" + }, + "IsRuleRun": { + "type": "boolean", + "description": "If rules engine rule ran on an order" + }, + "Notes": { + 
"type": "integer", + "description": "Quantity of order notes" + }, + "PartShipped": { + "type": "boolean", + "description": "If order partly shipped" + }, + "Marker": { + "type": ["null", "integer"], + "description": "Order marker (0 = NOT TAG, 1 = Tag 1, 2 = Tag 2, 3 = Tag 3, 4 = Tag 4, 5 = Tag 5, 6 = Tag 6, 7 = Parked)" + }, + "IsParked": { + "type": "boolean", + "description": "Is the order parked?" + }, + "Identifiers": { + "type": "array", + "description": "Order identifiers. [Prime | Scheduled]", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "IdentifierId": { + "type": "integer", + "description": "Internal identifier id. Use to update image and name." + }, + "IsCustom": { + "type": "boolean", + "description": "Is the tag user or system defined?" + }, + "ImageId": { + "type": "string" + }, + "ImageUrl": { + "type": "string" + }, + "Tag": { + "type": "string", + "description": "Internal tag for identification purposes" + }, + "Name": { + "type": "string", + "description": "Name displayed where the tag is used" + } + } + } + }, + "ReferenceNum": { + "type": "string", + "description": "Order reference number (Channel defined)" + }, + "SecondaryReference": { + "type": "string", + "description": "An additional reference number for the orderr (Used by some channels)" + }, + "ExternalReferenceNum": { + "type": "string", + "description": "This is an additional reference number from the sales channel, typically used by eBay" + }, + "ReceivedDate": { + "type": "string", + "format": "date-time", + "description": "The date and time at which the order was placed on the sales channel" + }, + "Source": { + "type": "string", + "description": "Order ChannelName/Source (e.g. EBAY)" + }, + "SubSource": { + "type": "string", + "description": "Order Subsource (e.g. EBAY1)" + }, + "SiteCode": { + "type": "string", + "description": "SiteCode used to differentiate between different sites from a single channel (eg. Amazon UK, Amazon US, Amazon FR...)" + }, + "HoldOrCancel": { + "type": "boolean", + "description": "This shows whether the order has been marked as on hold, for processed orders if the order has been cancelled OnHold = 1" + }, + "DespatchByDate": { + "type": "string", + "format": "date-time", + "description": "Despatch by Date" + }, + "ScheduledDelivery": { + "type": "object", + "description": "Scheduled delivery dates. Take priority over despatch by date", + "additionalProperties": false, + "properties": { + "From": { + "type": "string", + "format": "date-time" + }, + "To": { + "type": "string", + "format": "date-time" + } + } + }, + "HasScheduledDelivery": { + "type": "boolean" + }, + "Location": { + "type": "string", + "description": "Order location ID" + }, + "NumItems": { + "type": "integer", + "description": "Quantity of order items" + }, + "PickwaveIds": { + "type": "array", + "description": "All related Pickwave Ids", + "items": { + "type": "integer" + } + }, + "StockAllocationType": { + "type": ["null", "string"] + } + } + }, + "ShippingInfo": { + "type": "object", + "description": "Order shipping information", + "additionalProperties": false, + "properties": { + "Vendor": { + "type": "string", + "description": "Courier name (e.g. Royal Mail)" + }, + "PostalServiceId": { + "type": "string", + "description": "Postal service ID" + }, + "PostalServiceName": { + "type": "string", + "description": "Postal service name (e.g. 
Next day delivery)" + }, + "TotalWeight": { + "type": "number", + "description": "Order total weight" + }, + "ItemWeight": { + "type": "number", + "description": "If order is processed" + }, + "PackageCategoryId": { + "type": "string", + "description": "Package category ID" + }, + "PackageCategory": { + "type": "string", + "description": "Package category name" + }, + "PackageTypeId": { + "type": ["null", "string"], + "description": "Package type ID" + }, + "PackageType": { + "type": "string", + "description": "Package type name" + }, + "PostageCost": { + "type": "number", + "description": "Order postage cost" + }, + "PostageCostExTax": { + "type": "number", + "description": "Order postage cost excluding tax" + }, + "TrackingNumber": { + "type": "string", + "description": "Order tracking number provided by courier" + }, + "ManualAdjust": { + "type": "boolean", + "description": "If there is an adjustment to shipping cost was made" + } + } + }, + "CustomerInfo": { + "type": "object", + "description": "Order Customer information (Name, email etc)", + "additionalProperties": false, + "properties": { + "ChannelBuyerName": { + "type": "string", + "description": "Username of customer (Comes from channel)" + }, + "Address": { + "type": "object", + "description": "Customer address", + "additionalProperties": false, + "properties": { + "EmailAddress": { + "type": "string", + "description": "Customer's email address." + }, + "Address1": { + "type": "string", + "description": "First line of customer address." + }, + "Address2": { + "type": "string", + "description": "Second line of customer address." + }, + "Address3": { + "type": "string", + "description": "Third line of customer address." + }, + "Town": { + "type": "string", + "description": "Customer's town." + }, + "Region": { + "type": "string", + "description": "Customer's region." + }, + "PostCode": { + "type": "string", + "description": "Customer's postcode." + }, + "Country": { + "type": "string", + "description": "Customer's country." + }, + "Continent": { + "type": "string", + "description": "Customer's continent" + }, + "FullName": { + "type": "string", + "description": "Customer's first and second name." + }, + "Company": { + "type": "string", + "description": "Customer's company name." + }, + "PhoneNumber": { + "type": "string", + "description": "Customer's telephone number." + }, + "CountryId": { + "type": "string" + } + } + }, + "BillingAddress": { + "type": "object", + "description": "Customer billing address", + "additionalProperties": false, + "properties": { + "EmailAddress": { + "type": "string", + "description": "Customer's email address." + }, + "Address1": { + "type": "string", + "description": "First line of customer address." + }, + "Address2": { + "type": "string", + "description": "Second line of customer address." + }, + "Address3": { + "type": "string", + "description": "Third line of customer address." + }, + "Town": { + "type": "string", + "description": "Customer's town." + }, + "Region": { + "type": "string", + "description": "Customer's region." + }, + "PostCode": { + "type": "string", + "description": "Customer's postcode." + }, + "Country": { + "type": "string", + "description": "Customer's country." + }, + "Continent": { + "type": "string", + "description": "Customer's continent" + }, + "FullName": { + "type": "string", + "description": "Customer's first and second name." + }, + "Company": { + "type": "string", + "description": "Customer's company name." 
+ }, + "PhoneNumber": { + "type": "string", + "description": "Customer's telephone number." + }, + "CountryId": { + "type": "string" + } + } + } + } + }, + "TotalsInfo": { + "type": "object", + "description": "Order totals information", + "additionalProperties": false, + "properties": { + "pkOrderId": { + "type": "string", + "description": "Order Id" + }, + "Subtotal": { + "type": "number", + "description": "Order subtotal" + }, + "PostageCost": { + "type": "number", + "description": "Order postage cost" + }, + "PostageCostExTax": { + "type": "number", + "description": "Order postage cost ex. tax" + }, + "Tax": { + "type": "number", + "description": "Tax" + }, + "TotalCharge": { + "type": "number", + "description": "Total charge" + }, + "PaymentMethod": { + "type": "string", + "description": "Payment method" + }, + "PaymentMethodId": { + "type": "string", + "description": "Payment method ID" + }, + "ProfitMargin": { + "type": "number", + "description": "Profit margin" + }, + "TotalDiscount": { + "type": "number", + "description": "Total discount applied to the order" + }, + "Currency": { + "type": "string", + "description": "Order currency" + }, + "CountryTaxRate": { + "type": "number", + "description": "Country tax rate" + }, + "ConversionRate": { + "type": "number", + "description": "Currency conversion rate. Set at point of save by the currency" + } + } + }, + "ExtendedProperties": { + "type": "array", + "description": "Extended properties of an order", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "RowId": { + "type": "string", + "description": "Record row ID" + }, + "Name": { + "type": "string", + "description": "Extended property name" + }, + "Value": { + "type": "string", + "description": "Extended property value" + }, + "Type": { + "type": "string", + "description": "Extended property type" + } + } + } + }, + "FolderName": { + "type": "array", + "description": "Folder names assigned to an order", + "items": { + "type": "string" + } + }, + "Items": { + "type": "array", + "description": "List of order items", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "ItemId": { + "type": "string", + "description": "Stock Item ID" + }, + "ItemNumber": { + "type": "string", + "description": "Item number as on channel" + }, + "SKU": { + "type": "string", + "description": "Product SKU" + }, + "ItemSource": { + "type": "string", + "description": "Item source / channel name" + }, + "Title": { + "type": "string", + "description": "Item title" + }, + "Quantity": { + "type": "integer", + "description": "Quantity" + }, + "CategoryId": { + "type": "string" + }, + "CategoryName": { + "type": "string", + "description": "Product category" + }, + "CompositeAvailablity": { + "type": ["null", "integer"], + "description": "Composite availability" + }, + "StockLevelsSpecified": { + "type": "boolean", + "description": "If stock level specified" + }, + "OnOrder": { + "type": "integer", + "description": "Level due in purchase orders" + }, + "OnPurchaseOrder": { + "type": "object", + "description": "Purchase order bound to this item", + "additionalProperties": false, + "properties": { + "pkPurchaseItemId": { + "type": "string", + "description": "Primary key of the bound" + }, + "Rowid": { + "type": "string" + }, + "pkPurchaseId": { + "type": "string" + }, + "ExternalInvoiceNumber": { + "type": "string" + }, + "fkSupplierId": { + "type": "string" + }, + "DateOfDelivery": { + "type": "string", + "format": "date-time" + }, + 
"QuotedDeliveryDate": { + "type": "string", + "format": "date-time" + }, + "SupplierName": { + "type": "string" + }, + "fkLocationId": { + "type": "string" + } + } + }, + "InOrderBook": { + "type": ["null", "integer"], + "description": "Quantity currently in open orders" + }, + "Level": { + "type": "integer", + "description": "Current stock level" + }, + "MinimumLevel": { + "type": ["null", "integer"], + "description": "Minimum level" + }, + "AvailableStock": { + "type": "integer", + "description": "Currently available stock level (Level-InOrderBook)" + }, + "PricePerUnit": { + "type": "number", + "description": "Unit price" + }, + "UnitCost": { + "type": "number", + "description": "Unit cost" + }, + "DespatchStockUnitCost": { + "type": "number", + "description": "Despatch stock unit cost" + }, + "Discount": { + "type": "number", + "description": "Percentage (0%, 10%, 20%, etc...)" + }, + "Tax": { + "type": "number", + "description": "Actual tax value on an item" + }, + "TaxRate": { + "type": "number", + "description": "Tax rate" + }, + "Cost": { + "type": "number", + "description": "Total item cost (exc tax)" + }, + "CostIncTax": { + "type": "number", + "description": "Total item cost (inc tax)" + }, + "CompositeSubItems": { + "$comment": "It should be \"$ref\": \"#/properties/Items\" but Airbyte doesn't support recursive $refs.", + "type": "array", + "items": { + "type": "object" + } + }, + "IsService": { + "type": "boolean", + "description": "if item is a service" + }, + "SalesTax": { + "type": "number", + "description": "Sales Tax" + }, + "TaxCostInclusive": { + "type": "boolean", + "description": "If tax is included in a cost" + }, + "PartShipped": { + "type": "boolean", + "description": "If order is partly shipped" + }, + "Weight": { + "type": "number", + "description": "Order weight" + }, + "BarcodeNumber": { + "type": "string", + "description": "Product barcode" + }, + "Market": { + "type": "integer", + "description": "Market" + }, + "ChannelSKU": { + "type": "string", + "description": "Channel product SKU" + }, + "ChannelTitle": { + "type": "string", + "description": "Channel product title" + }, + "DiscountValue": { + "type": "number" + }, + "HasImage": { + "type": "boolean", + "description": "If item got an image" + }, + "ImageId": { + "type": ["null", "string"], + "description": "Image ID" + }, + "AdditionalInfo": { + "type": "array", + "description": "List of order item options", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "pkOptionId": { + "type": "string", + "description": "Option ID" + }, + "Property": { + "type": "string", + "description": "Option property" + }, + "Value": { + "type": "string", + "description": "Value of the option" + } + } + } + }, + "StockLevelIndicator": { + "type": "integer", + "description": "Stock level indicator" + }, + "ShippingCost": { + "type": "number", + "description": "If batch number scan required" + }, + "PartShippedQty": { + "type": "integer", + "description": "ShippingCost" + }, + "ItemName": { + "type": "string", + "description": "PartShippedQty" + }, + "BatchNumberScanRequired": { + "type": "boolean", + "description": "ItemName" + }, + "SerialNumberScanRequired": { + "type": "boolean", + "description": "If serial number scan required" + }, + "BinRack": { + "type": "string", + "description": "Binrack location" + }, + "BinRacks": { + "type": "array", + "description": "List of BinRacks used for OrderItem", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + 
"Quantity": { + "type": "integer", + "description": "Quantity for BinRack per Location" + }, + "BinRack": { + "type": "string", + "description": "BinRack" + }, + "Location": { + "type": "string", + "description": "LocationId of the BinRack" + }, + "BatchId": { + "type": ["null", "integer"], + "description": "If the item is batched, identifies the batch number" + }, + "OrderItemBatchId": { + "type": ["null", "integer"], + "description": "If the item is batched, identifies the unique order item batch row" + } + } + } + }, + "InventoryTrackingType": { + "type": "integer", + "description": "Identifies whether the item has a sell by date or other defined order in which inventory is to be sold" + }, + "isBatchedStockItem": { + "type": "boolean", + "description": "If item has batches" + }, + "IsWarehouseManaged": { + "type": "boolean" + }, + "IsUnlinked": { + "type": "boolean" + }, + "ParentItemId": { + "type": "string" + }, + "StockItemIntId": { + "type": "integer" + }, + "Boxes": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "BoxId": { + "type": "integer", + "description": "Unique box id." + }, + "StockItemIntId": { + "type": "integer" + }, + "BoxName": { + "type": "string", + "description": "Box name max 16 characters" + }, + "Width": { + "type": "number", + "description": "Width of the box" + }, + "Height": { + "type": "number", + "description": "Height of the box" + }, + "Length": { + "type": "number", + "description": "Depth of the box" + }, + "Weight": { + "type": "number", + "description": "Total weight of the box." + }, + "ValuePercentage": { + "type": "number", + "description": "Value break down percentage" + }, + "Barcode": { + "type": "string", + "description": "Box barcode, max 64 characters." + }, + "PackagingTypeId": { + "type": "string", + "description": "Packaging type id" + }, + "LogicalDelete": { + "type": "boolean", + "description": "IsDeleted flag." + } + } + } + }, + "RowId": { + "type": "string", + "description": "Record row ID" + }, + "OrderId": { + "type": "string", + "description": "Order ID (pkOrderID)" + }, + "StockItemId": { + "type": "string", + "description": "Stock Item ID" + }, + "StockId": { + "type": "string" + } + } + } + }, + "Notes": { + "type": "array", + "description": "List of order notes", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "OrderNoteId": { + "type": "string", + "description": "Order note ID" + }, + "OrderId": { + "type": "string", + "description": "Order Id" + }, + "NoteDate": { + "type": "string", + "format": "date-time", + "description": "Date and time when note was added" + }, + "Internal": { + "type": "boolean", + "description": "order note type (Internal or External)" + }, + "Note": { + "type": "string", + "description": "Note's text" + }, + "CreatedBy": { + "type": "string", + "description": "User that created note" + }, + "NoteTypeId": { + "type": ["null", "string"] + } + } + } + }, + "PaidDateTime": { + "type": ["null", "string"], + "format": "date-time", + "description": "Date and time when the order was marked as paid" + }, + "TaxId": { + "type": "string", + "description": "Buyer's tax number." 
+ } + } + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["ProcessedDateTime"], + "source_defined_primary_key": [["NumOrderId"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append_dedup" } ] } diff --git a/airbyte-integrations/connectors/source-linnworks/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-linnworks/integration_tests/sample_state.json index fc0bacca9d82..023f9db9a5dc 100644 --- a/airbyte-integrations/connectors/source-linnworks/integration_tests/sample_state.json +++ b/airbyte-integrations/connectors/source-linnworks/integration_tests/sample_state.json @@ -1,5 +1,8 @@ { "processed_orders": { - "dReceivedDate": "2021-01-01T00:00:00+00:00" + "dProcessedOn": "2021-11-24T00:00:00+00:00" + }, + "processed_order_details": { + "ProcessedDateTime": "2021-11-24T00:00:00+00:00" } } diff --git a/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/processed_order_details.json b/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/processed_order_details.json index a5afaaf4ca60..f7c206789d2d 100644 --- a/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/processed_order_details.json +++ b/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/processed_order_details.json @@ -182,7 +182,7 @@ "additionalProperties": false, "properties": { "Vendor": { - "type": "string", + "type": ["null", "string"], "description": "Courier name (e.g. Royal Mail)" }, "PostalServiceId": { @@ -206,7 +206,7 @@ "description": "Package category ID" }, "PackageCategory": { - "type": "string", + "type": ["null", "string"], "description": "Package category name" }, "PackageTypeId": { @@ -470,7 +470,7 @@ "description": "Item number as on channel" }, "SKU": { - "type": "string", + "type": ["null", "string"], "description": "Product SKU" }, "ItemSource": { @@ -690,7 +690,7 @@ "description": "If serial number scan required" }, "BinRack": { - "type": "string", + "type": ["null", "string"], "description": "Binrack location" }, "BinRacks": { diff --git a/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/processed_orders.json b/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/processed_orders.json index f6c5e796183f..c2a7addf04b5 100644 --- a/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/processed_orders.json +++ b/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/processed_orders.json @@ -8,11 +8,11 @@ "description": "Order ID" }, "cShippingAddress": { - "type": "string", + "type": ["null", "string"], "description": "Customer's shipping address" }, "dReceivedDate": { - "type": "string", + "type": ["null", "string"], "format": "date-time", "description": "Date when order was received on a channel" }, @@ -90,11 +90,11 @@ "description": "Postal service code" }, "Vendor": { - "type": "string", + "type": ["null", "string"], "description": "Courier name (e.g. 
DPD)" }, "BillingEmailAddress": { - "type": "string" + "type": ["null", "string"] }, "ReferenceNum": { "type": "string", @@ -171,11 +171,11 @@ "description": "When order was cancelled" }, "PackageCategory": { - "type": "string", + "type": ["null", "string"], "description": "Package category" }, "PackageTitle": { - "type": "string", + "type": ["null", "string"], "description": "Package name" }, "ItemWeight": { @@ -187,51 +187,51 @@ "description": "Total order weight" }, "FolderCollection": { - "type": "string", + "type": ["null", "string"], "description": "Folder name of an order" }, "cBillingAddress": { - "type": "string", + "type": ["null", "string"], "description": "Customer billing address" }, "BillingName": { - "type": "string", + "type": ["null", "string"], "description": "Customer billing name" }, "BillingCompany": { - "type": "string", + "type": ["null", "string"], "description": "Customer billing company" }, "BillingAddress1": { - "type": "string", + "type": ["null", "string"], "description": "Billing address line one" }, "BillingAddress2": { - "type": "string", + "type": ["null", "string"], "description": "Billing address line two" }, "BillingAddress3": { - "type": "string", + "type": ["null", "string"], "description": "Billing address line three" }, "BillingTown": { - "type": "string", + "type": ["null", "string"], "description": "Billing town" }, "BillingRegion": { - "type": "string", + "type": ["null", "string"], "description": "Billing region, area, county" }, "BillingPostCode": { - "type": "string", + "type": ["null", "string"], "description": "Billing postcode" }, "BillingCountryName": { - "type": "string", + "type": ["null", "string"], "description": "Billing country" }, "BillingPhoneNumber": { - "type": "string", + "type": ["null", "string"], "description": "Billing phone number" }, "HoldOrCancel": { diff --git a/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/stock_items.json b/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/stock_items.json index c878db0ee633..115c4ccdc98e 100644 --- a/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/stock_items.json +++ b/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/stock_items.json @@ -110,7 +110,7 @@ "description": "Location tag" }, "BinRack": { - "type": "string", + "type": ["null", "string"], "description": "Bin rack" }, "IsWarehouseManaged": { @@ -156,7 +156,7 @@ "description": "if( Quantity == 0 ) dbo.StockItem.PurchasePrice Else CurrentStockValue / Quantity" }, "SKU": { - "type": "string", + "type": ["null", "string"], "description": "Product SKU" }, "AutoAdjust": { @@ -380,7 +380,7 @@ "description": "Url to full size image" }, "CheckSumValue": { - "type": "string", + "type": ["null", "string"], "description": "Image check sum" }, "pkRowId": { @@ -396,11 +396,11 @@ "description": "Sort order for the image" }, "ChecksumValue": { - "type": "string", + "type": ["null", "string"], "description": "Internal checksum value" }, "RawChecksum": { - "type": "string", + "type": ["null", "string"], "description": "Raw file checksum (Used for UI to determine if the image file is the same before submitting for upload)" }, "StockItemId": { @@ -467,7 +467,7 @@ "description": "Default package group id" }, "PackageGroupName": { - "type": "string", + "type": ["null", "string"], "description": "Default package group name" }, "Height": { diff --git a/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/stock_locations.json 
b/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/stock_locations.json index 8ecb0bf69a21..dfdde21d0c11 100644 --- a/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/stock_locations.json +++ b/airbyte-integrations/connectors/source-linnworks/source_linnworks/schemas/stock_locations.json @@ -24,7 +24,7 @@ "description": "Location tag" }, "BinRack": { - "type": "string", + "type": ["null", "string"], "description": "Bin rack" }, "IsWarehouseManaged": { diff --git a/airbyte-integrations/connectors/source-linnworks/source_linnworks/spec.json b/airbyte-integrations/connectors/source-linnworks/source_linnworks/spec.json index d1217998871c..7a310afa5c3c 100644 --- a/airbyte-integrations/connectors/source-linnworks/source_linnworks/spec.json +++ b/airbyte-integrations/connectors/source-linnworks/source_linnworks/spec.json @@ -1,5 +1,5 @@ { - "documentationUrl": "https://docsurl.com", + "documentationUrl": "https://docs.airbyte.io/integrations/sources/linnworks", "connectionSpecification": { "$schema": "http://json-schema.org/draft-07/schema#", "title": "Linnworks Spec", diff --git a/airbyte-integrations/connectors/source-linnworks/source_linnworks/streams.py b/airbyte-integrations/connectors/source-linnworks/source_linnworks/streams.py index 8954264048cd..676e1c375fbe 100644 --- a/airbyte-integrations/connectors/source-linnworks/source_linnworks/streams.py +++ b/airbyte-integrations/connectors/source-linnworks/source_linnworks/streams.py @@ -172,7 +172,7 @@ class ProcessedOrders(LinnworksGenericPagedResult, IncrementalLinnworksStream): # Response: SearchProcessedOrdersResponse https://apps.linnworks.net/Api/Class/API_Linnworks-Controllers-ProcessedOrders-Responses-SearchProcessedOrdersResponse # Allows 150 calls per minute primary_key = "nOrderId" - cursor_field = "dReceivedDate" + cursor_field = "dProcessedOn" page_size = 500 use_cache = True @@ -207,12 +207,12 @@ def request_body_data( self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None ) -> MutableMapping[str, Any]: request = { - "DateField": "received", + "DateField": "processed", "FromDate": stream_slice["FromDate"], "ToDate": stream_slice["ToDate"], "PageNumber": 1 if not next_page_token else next_page_token["PageNumber"], "ResultsPerPage": self.page_size, - "SearchSorting": {"SortField": "dReceivedDate", "SortDirection": "ASC"}, + "SearchSorting": {"SortField": "dProcessedOn", "SortDirection": "ASC"}, } return { @@ -240,11 +240,12 @@ def request_cache(self) -> Cassette: ) -class ProcessedOrderDetails(HttpSubStream, LinnworksStream): +class ProcessedOrderDetails(HttpSubStream, IncrementalLinnworksStream): # https://apps.linnworks.net/Api/Method/Orders-GetOrdersById # Response: List https://apps.linnworks.net/Api/Class/linnworks-spa-commondata-OrderManagement-ClassBase-OrderDetails # Allows 250 calls per minute primary_key = "NumOrderId" + cursor_field = "ProcessedDateTime" page_size = 100 def __init__(self, **kwargs): @@ -253,9 +254,13 @@ def __init__(self, **kwargs): def path(self, **kwargs) -> str: return "/api/Orders/GetOrdersById" - def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: + def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: + parent_stream_state = None + if stream_state: + parent_stream_state = {"dProcessedOn": stream_state["ProcessedDateTime"]} + buffer = [] - for slice in HttpSubStream.stream_slices(self, **kwargs): + 
for slice in HttpSubStream.stream_slices(self, stream_state=parent_stream_state, **kwargs): buffer.append(slice["parent"]["pkOrderID"]) if len(buffer) == self.page_size: yield buffer diff --git a/airbyte-integrations/connectors/source-linnworks/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-linnworks/unit_tests/test_incremental_streams.py index febe8764fccc..7ce246831db9 100644 --- a/airbyte-integrations/connectors/source-linnworks/unit_tests/test_incremental_streams.py +++ b/airbyte-integrations/connectors/source-linnworks/unit_tests/test_incremental_streams.py @@ -11,7 +11,9 @@ import pytest import requests import vcr -from source_linnworks.streams import IncrementalLinnworksStream, ProcessedOrders +from airbyte_cdk.models.airbyte_protocol import SyncMode +from airbyte_cdk.sources.streams.http.http import HttpSubStream +from source_linnworks.streams import IncrementalLinnworksStream, ProcessedOrderDetails, ProcessedOrders @pytest.fixture @@ -93,10 +95,10 @@ def date(*args): [ (None, None, 24, date(2050, 1, 1), date(2050, 1, 2)), (date(2050, 1, 2), None, 48, date(2050, 1, 1), date(2050, 1, 3)), - (None, {"dReceivedDate": date(2050, 1, 4)}, 1, date(2050, 1, 4), date(2050, 1, 4)), + (None, {"dProcessedOn": date(2050, 1, 4)}, 1, date(2050, 1, 4), date(2050, 1, 4)), ( date(2050, 1, 5), - {"dReceivedDate": date(2050, 1, 4)}, + {"dProcessedOn": date(2050, 1, 4)}, 48, date(2050, 1, 4), date(2050, 1, 6), @@ -104,7 +106,7 @@ def date(*args): ( # Yearly date(2052, 1, 1), - {"dReceivedDate": date(2050, 1, 1)}, + {"dProcessedOn": date(2050, 1, 1)}, 25, date(2050, 1, 1), date(2052, 1, 2), @@ -112,7 +114,7 @@ def date(*args): ( # Monthly date(2050, 4, 1), - {"dReceivedDate": date(2050, 1, 1)}, + {"dProcessedOn": date(2050, 1, 1)}, 13, date(2050, 1, 1), date(2050, 4, 2), @@ -120,7 +122,7 @@ def date(*args): ( # Weekly date(2050, 1, 31), - {"dReceivedDate": date(2050, 1, 1)}, + {"dProcessedOn": date(2050, 1, 1)}, 5, date(2050, 1, 1), date(2050, 2, 1), @@ -128,7 +130,7 @@ def date(*args): ( # Daily date(2050, 1, 1, 23, 59, 59), - {"dReceivedDate": date(2050, 1, 1)}, + {"dProcessedOn": date(2050, 1, 1)}, 24, date(2050, 1, 1), date(2050, 1, 2), @@ -213,3 +215,34 @@ def test_processed_orders_request_cache(patch_incremental_base_class, mocker): serializer="yaml", match_on=["method", "scheme", "host", "port", "path", "query", "body"], ) + + +@pytest.mark.parametrize( + ("count", "stream_state"), + [ + (5, None), + (205, None), + (5, {"ProcessedDateTime": "a-date"}), + ], +) +def test_processed_order_details_stream_slices(patch_incremental_base_class, mocker, count, stream_state): + parent_stream_slices = MagicMock(return_value=[{"parent": {"pkOrderID": str(n)}} for n in range(count)]) + mocker.patch.object(HttpSubStream, "stream_slices", parent_stream_slices) + + stream = ProcessedOrderDetails() + expected_slices = [[str(m) for m in range(count)[i : i + stream.page_size]] for i in range(0, count, stream.page_size)] + + stream_slices = stream.stream_slices(sync_mode=SyncMode.full_refresh, stream_state=stream_state) + + assert list(stream_slices) == list(expected_slices) + + actual_state = parent_stream_slices.call_args.kwargs["stream_state"] + if actual_state: + assert actual_state["dProcessedOn"] == stream_state["ProcessedDateTime"] + + +def test_processed_order_details_request_body_data(patch_incremental_base_class): + stream = ProcessedOrderDetails() + request_body_data = stream.request_body_data(None, ["abc", "def", "ghi"]) + + assert request_body_data == {"pkOrderIds": 
'["abc","def","ghi"]'} diff --git a/airbyte-integrations/connectors/source-linnworks/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-linnworks/unit_tests/test_streams.py index 397aaead93bf..c29a05f41053 100644 --- a/airbyte-integrations/connectors/source-linnworks/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-linnworks/unit_tests/test_streams.py @@ -6,15 +6,7 @@ import pytest import requests -from airbyte_cdk.models.airbyte_protocol import SyncMode -from source_linnworks.streams import ( - LinnworksStream, - ProcessedOrderDetails, - ProcessedOrders, - StockItems, - StockLocationDetails, - StockLocations, -) +from source_linnworks.streams import LinnworksStream, StockItems, StockLocationDetails, StockLocations @pytest.fixture @@ -158,31 +150,3 @@ def test_stock_items_request_params(mocker, requests_mock, next_page_token, expe assert ("NextPageTokenKey" in params) == expected if next_page_token: assert next_page_token.items() <= params.items() - - -@pytest.mark.parametrize( - ("count"), - [ - (5), - (205), - ], -) -def test_processed_order_details_stream_slices(patch_base_class, mocker, count): - parent_records = [{"pkOrderID": str(n)} for n in range(count)] - - mocker.patch.object(ProcessedOrders, "stream_slices", MagicMock(return_value=[{}])) - mocker.patch.object(ProcessedOrders, "read_records", MagicMock(return_value=parent_records)) - - stream = ProcessedOrderDetails() - expected_slices = [[str(m) for m in range(count)[i : i + stream.page_size]] for i in range(0, count, stream.page_size)] - - stream_slices = stream.stream_slices(sync_mode=SyncMode.full_refresh) - - assert list(stream_slices) == list(expected_slices) - - -def test_processed_order_details_request_body_data(patch_base_class): - stream = ProcessedOrderDetails() - request_body_data = stream.request_body_data(None, ["abc", "def", "ghi"]) - - assert request_body_data == {"pkOrderIds": '["abc","def","ghi"]'} diff --git a/airbyte-integrations/connectors/source-microsoft-teams/Dockerfile b/airbyte-integrations/connectors/source-microsoft-teams/Dockerfile index f5590d653aac..969e794249da 100644 --- a/airbyte-integrations/connectors/source-microsoft-teams/Dockerfile +++ b/airbyte-integrations/connectors/source-microsoft-teams/Dockerfile @@ -34,5 +34,5 @@ COPY source_microsoft_teams ./source_microsoft_teams ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.4 +LABEL io.airbyte.version=0.2.5 LABEL io.airbyte.name=airbyte/source-microsoft-teams diff --git a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/spec.json b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/spec.json index 230b1f9af530..442abfde2939 100644 --- a/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/spec.json +++ b/airbyte-integrations/connectors/source-microsoft-teams/source_microsoft_teams/spec.json @@ -9,6 +9,7 @@ "properties": { "period": { "type": "string", + "title": "Period", "description": "Specifies the length of time over which the Team Device Report stream is aggregated. The supported values are: D7, D30, D90, and D180.", "examples": ["D7"] }, @@ -38,23 +39,23 @@ "tenant_id": { "title": "Directory (tenant) ID", "type": "string", - "description": "Directory (tenant) ID" + "description": "A globally unique identifier (GUID) that is different than your organization name or domain. 
Follow these steps to obtain it: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID from the URL"
       },
       "client_id": {
-        "title": "Application (client) ID",
+        "title": "Client ID",
         "type": "string",
-        "description": "Application (client) ID"
+        "description": "The Client ID of your Microsoft Teams developer application."
       },
       "client_secret": {
         "title": "Client Secret",
         "type": "string",
-        "description": "Client secret",
+        "description": "The Client Secret of your Microsoft Teams developer application.",
         "airbyte_secret": true
       },
       "refresh_token": {
         "title": "Refresh Token",
         "type": "string",
-        "description": "A refresh token generated using the above client ID and secret",
+        "description": "A Refresh Token to renew the expired Access Token.",
         "airbyte_secret": true
       }
     }
@@ -75,17 +76,17 @@
       "tenant_id": {
         "title": "Directory (tenant) ID",
         "type": "string",
-        "description": "Directory (tenant) ID"
+        "description": "A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain it: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID from the URL"
       },
       "client_id": {
-        "title": "Application (client) ID",
+        "title": "Client ID",
         "type": "string",
-        "description": "Application (client) ID"
+        "description": "The Client ID of your Microsoft Teams developer application."
       },
       "client_secret": {
         "title": "Client Secret",
         "type": "string",
-        "description": "Client secret",
+        "description": "The Client Secret of your Microsoft Teams developer application.",
         "airbyte_secret": true
       }
     }
diff --git a/airbyte-integrations/connectors/source-monday/Dockerfile b/airbyte-integrations/connectors/source-monday/Dockerfile
index b41a04ee09f1..3a02c7d629cf 100644
--- a/airbyte-integrations/connectors/source-monday/Dockerfile
+++ b/airbyte-integrations/connectors/source-monday/Dockerfile
@@ -34,5 +34,5 @@ COPY source_monday ./source_monday
 ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
 ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
 
-LABEL io.airbyte.version=0.1.1
+LABEL io.airbyte.version=0.1.2
 LABEL io.airbyte.name=airbyte/source-monday
diff --git a/airbyte-integrations/connectors/source-monday/source_monday/spec.json b/airbyte-integrations/connectors/source-monday/source_monday/spec.json
index 870dd5ac587d..d7bca8eb685c 100644
--- a/airbyte-integrations/connectors/source-monday/source_monday/spec.json
+++ b/airbyte-integrations/connectors/source-monday/source_monday/spec.json
@@ -1,5 +1,5 @@
 {
-  "documentationUrl": "https://docsurl.com",
+  "documentationUrl": "https://docs.airbyte.io/integrations/sources/monday",
   "connectionSpecification": {
     "$schema": "http://json-schema.org/draft-07/schema#",
     "title": "Monday Spec",
@@ -9,7 +9,8 @@
     "properties": {
       "api_token": {
         "type": "string",
-        "description": "This is the API token to authenticate requests to Monday. Profile picture (bottom left) => Admin => API",
+        "title": "Personal Access Token",
+        "description": "Access Token for making authenticated requests.",
         "airbyte_secret": true
       }
     }
diff --git a/airbyte-integrations/connectors/source-mongodb-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-mongodb-strict-encrypt/Dockerfile
index 21d6d8274c36..c923d7e91c70 100644
--- a/airbyte-integrations/connectors/source-mongodb-strict-encrypt/Dockerfile
+++ b/airbyte-integrations/connectors/source-mongodb-strict-encrypt/Dockerfile
@@ -4,9 +4,7 @@ WORKDIR /airbyte
 
 ENV APPLICATION source-mongodb-strict-encrypt
 
-COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar
-
-RUN tar xf ${APPLICATION}.tar --strip-components=1
+ADD build/distributions/${APPLICATION}*.tar /airbyte
 
 LABEL io.airbyte.version=0.1.6
 LABEL io.airbyte.name=airbyte/source-mongodb-strict-encrypt
diff --git a/airbyte-integrations/connectors/source-mongodb-v2/Dockerfile b/airbyte-integrations/connectors/source-mongodb-v2/Dockerfile
index a76b9b06e95e..59834cae3a8f 100644
--- a/airbyte-integrations/connectors/source-mongodb-v2/Dockerfile
+++ b/airbyte-integrations/connectors/source-mongodb-v2/Dockerfile
@@ -4,9 +4,7 @@ WORKDIR /airbyte
 
 ENV APPLICATION source-mongodb-v2
 
-COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar
-
-RUN tar xf ${APPLICATION}.tar --strip-components=1
+ADD build/distributions/${APPLICATION}*.tar /airbyte
 
 LABEL io.airbyte.version=0.1.9
 LABEL io.airbyte.name=airbyte/source-mongodb-v2
diff --git a/airbyte-integrations/connectors/source-mssql-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-mssql-strict-encrypt/Dockerfile
index 4334d611e77d..346ce5c3a917 100644
--- a/airbyte-integrations/connectors/source-mssql-strict-encrypt/Dockerfile
+++ b/airbyte-integrations/connectors/source-mssql-strict-encrypt/Dockerfile
@@ -4,9 +4,7 @@ WORKDIR /airbyte
 
 ENV APPLICATION source-mssql-strict-encrypt
 
-COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar
-
-RUN tar xf ${APPLICATION}.tar --strip-components=1
+ADD build/distributions/${APPLICATION}*.tar /airbyte
 
 LABEL io.airbyte.version=0.1.3
 LABEL io.airbyte.name=airbyte/source-mssql-strict-encrypt
diff --git a/airbyte-integrations/connectors/source-mssql/Dockerfile b/airbyte-integrations/connectors/source-mssql/Dockerfile
index 0a263d574bbb..dff049aca56c 100644
--- a/airbyte-integrations/connectors/source-mssql/Dockerfile
+++ b/airbyte-integrations/connectors/source-mssql/Dockerfile
@@ -4,9 +4,7 @@ WORKDIR /airbyte
 
 ENV APPLICATION source-mssql
 
-COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar
-
-RUN tar xf ${APPLICATION}.tar --strip-components=1
+ADD build/distributions/${APPLICATION}*.tar /airbyte
 
 LABEL io.airbyte.version=0.3.10
 LABEL io.airbyte.name=airbyte/source-mssql
diff --git a/airbyte-integrations/connectors/source-mssql/README.md b/airbyte-integrations/connectors/source-mssql/README.md
index 4b239ea23d19..26bf5d6f386b 100644
--- a/airbyte-integrations/connectors/source-mssql/README.md
+++ b/airbyte-integrations/connectors/source-mssql/README.md
@@ -2,6 +2,17 @@
 
 ## Performance Test
 
+### Running performance tests with CPU and Memory limits for the container
+
+To run performance tests with CPU and Memory limits for the container, start them with the
+additional parameters **cpulimit=cpulimit/YOUR_CPU_LIMIT** and **memorylimit=memorylimit/YOUR_MEMORY_LIMIT**.
+**YOUR_MEMORY_LIMIT** - the RAM limit. Be sure to append the unit, MB or GB; the minimum is 6MB.
+**YOUR_CPU_LIMIT** - the CPU limit. The minimum is 2.
+These parameters are optional and can be used independently of each other.
+For example, if you pass only **memorylimit=memorylimit/2GB**, only the container's memory is limited and the CPU is not.
+If you omit both parameters, the performance tests run without memory or CPU limits.
+
+
 ### Use MsSQL script to populate the benchmark database
 
 In order to create a database with a certain number of tables, and a certain number of records in each of them,
diff --git a/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile
index 851d901bd963..4229c5f0df84 100644
--- a/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile
+++ b/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile
@@ -4,9 +4,7 @@ WORKDIR /airbyte
 
 ENV APPLICATION source-mysql-strict-encrypt
 
-COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar
-
-RUN tar xf ${APPLICATION}.tar --strip-components=1
+ADD build/distributions/${APPLICATION}*.tar /airbyte
 
 LABEL io.airbyte.version=0.1.5
 LABEL io.airbyte.name=airbyte/source-mysql-strict-encrypt
diff --git a/airbyte-integrations/connectors/source-mysql/Dockerfile b/airbyte-integrations/connectors/source-mysql/Dockerfile
index 67f3a8255fe8..2e76b2839dbd 100644
--- a/airbyte-integrations/connectors/source-mysql/Dockerfile
+++ b/airbyte-integrations/connectors/source-mysql/Dockerfile
@@ -4,9 +4,7 @@ WORKDIR /airbyte
 
 ENV APPLICATION source-mysql
 
-COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar
-
-RUN tar xf ${APPLICATION}.tar --strip-components=1
+ADD build/distributions/${APPLICATION}*.tar /airbyte
 
 LABEL io.airbyte.version=0.5.1
diff --git a/airbyte-integrations/connectors/source-mysql/README.md b/airbyte-integrations/connectors/source-mysql/README.md
index 1f15ee12662e..4f968d6c4ba6 100644
--- a/airbyte-integrations/connectors/source-mysql/README.md
+++ b/airbyte-integrations/connectors/source-mysql/README.md
@@ -37,6 +37,15 @@ To run performance tests:
 ```
 ./gradlew :airbyte-integrations:connectors:source-mysql:performanceTest
 ```
+### Running performance tests with CPU and Memory limits for the container
+
+To run performance tests with CPU and Memory limits for the container, start them with the
+additional parameters **cpulimit=cpulimit/YOUR_CPU_LIMIT** and **memorylimit=memorylimit/YOUR_MEMORY_LIMIT**.
+**YOUR_MEMORY_LIMIT** - the RAM limit. Be sure to append the unit, MB or GB; the minimum is 6MB.
+**YOUR_CPU_LIMIT** - the CPU limit. The minimum is 2.
+These parameters are optional and can be used independently of each other.
+For example, if you pass only **memorylimit=memorylimit/2GB**, only the container's memory is limited and the CPU is not.
+If you omit both parameters, the performance tests run without memory or CPU limits.
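+
+For illustration, a hypothetical invocation; the assumption that the parameters are appended to the Gradle command exactly as written above is mine, so check your test harness for the exact pass-through syntax:
+```
+./gradlew :airbyte-integrations:connectors:source-mysql:performanceTest cpulimit=cpulimit/2 memorylimit=memorylimit/2GB
+```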
### Use MySQL script to populate the benchmark database diff --git a/airbyte-integrations/connectors/source-openweather/.dockerignore b/airbyte-integrations/connectors/source-openweather/.dockerignore new file mode 100644 index 000000000000..6ed52d920dd8 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/.dockerignore @@ -0,0 +1,7 @@ +* +!Dockerfile +!Dockerfile.test +!main.py +!source_openweather +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-openweather/Dockerfile b/airbyte-integrations/connectors/source-openweather/Dockerfile new file mode 100644 index 000000000000..deda9952b9e5 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.7.11-alpine3.14 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_openweather ./source_openweather + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-openweather diff --git a/airbyte-integrations/connectors/source-openweather/README.md b/airbyte-integrations/connectors/source-openweather/README.md new file mode 100644 index 000000000000..0c787fe03304 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/README.md @@ -0,0 +1,132 @@ +# Open Weather Source + +This is the repository for the Open Weather source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/open-weather). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. 
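+
+For reference, this connector's `requirements.txt` (added later in this patch) consists of exactly these editable installs:
+```
+-e ../../bases/source-acceptance-test
+-e .
+```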
+
+#### Building via Gradle
+You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow.
+
+To build using Gradle, from the Airbyte repository root, run:
+```
+./gradlew :airbyte-integrations:connectors:source-openweather:build
+```
+
+#### Create credentials
+**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/open-weather)
+to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_openweather/spec.json` file.
+Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
+See `integration_tests/sample_config.json` for a sample config file.
+
+**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source open-weather test creds`
+and place them into `secrets/config.json`.
+
+### Locally running the connector
+```
+python main.py spec
+python main.py check --config secrets/config.json
+python main.py discover --config secrets/config.json
+python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
+```
+
+### Locally running the connector docker image
+
+#### Build
+First, make sure you build the latest Docker image:
+```
+docker build . -t airbyte/source-openweather:dev
+```
+
+You can also build the connector image via Gradle:
+```
+./gradlew :airbyte-integrations:connectors:source-openweather:airbyteDocker
+```
+When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in
+the Dockerfile.
+
+#### Run
+Then run any of the connector commands as follows:
+```
+docker run --rm airbyte/source-openweather:dev spec
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-openweather:dev check --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-openweather:dev discover --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-openweather:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
+```
+## Testing
+Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named.
+First install test dependencies into your virtual environment:
+```
+pip install .[tests]
+```
+### Unit Tests
+To run unit tests locally, from the connector directory run:
+```
+python -m pytest unit_tests
+```
+
+### Integration Tests
+There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector).
+#### Custom Integration tests
+Place custom tests inside the `integration_tests/` folder, then, from the connector root, run
+```
+python -m pytest integration_tests
+```
+#### Acceptance Tests
+Customize the `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information.
+If your connector needs to create or destroy resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py (see the sketch below).
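+
+A minimal sketch of such a fixture; it mirrors the placeholder `connector_setup` fixture in this connector's own `integration_tests/acceptance.py`, shown later in this patch:
+```
+import pytest
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+    # Create any external resources the acceptance tests need here,
+    # then tear them down after the yield.
+    yield
+```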
+To run your integration tests with acceptance tests, from the connector root, run
+```
+python -m pytest integration_tests -p integration_tests.acceptance
+```
+To run your integration tests with Docker, use the `acceptance-test-docker.sh` script added alongside this connector.
+
+### Using gradle to run tests
+All commands should be run from the Airbyte project root.
+To run unit tests:
+```
+./gradlew :airbyte-integrations:connectors:source-openweather:unitTest
+```
+To run acceptance and custom integration tests:
+```
+./gradlew :airbyte-integrations:connectors:source-openweather:integrationTest
+```
+
+## Dependency Management
+All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
+We split dependencies into two groups:
+* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list.
+* dependencies required for testing go in the `TEST_REQUIREMENTS` list.
+
+### Publishing a new version of the connector
+You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
+1. Make sure your changes are passing unit and integration tests.
+1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)).
+1. Create a Pull Request.
+1. Pat yourself on the back for being an awesome contributor.
+1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
diff --git a/airbyte-integrations/connectors/source-openweather/acceptance-test-config.yml b/airbyte-integrations/connectors/source-openweather/acceptance-test-config.yml
new file mode 100644
index 000000000000..7088a5abbeb1
--- /dev/null
+++ b/airbyte-integrations/connectors/source-openweather/acceptance-test-config.yml
@@ -0,0 +1,18 @@
+connector_image: airbyte/source-openweather:dev
+tests:
+  spec:
+    - spec_path: "source_openweather/spec.json"
+  connection:
+    - config_path: "secrets/config.json"
+      status: "succeed"
+    - config_path: "integration_tests/invalid_config.json"
+      status: "failed"
+  discovery:
+    - config_path: "secrets/config.json"
+  basic_read:
+    - config_path: "secrets/config.json"
+      configured_catalog_path: "integration_tests/configured_catalog.json"
+  incremental:
+    - config_path: "secrets/config.json"
+      configured_catalog_path: "integration_tests/configured_catalog.json"
+      future_state_path: "integration_tests/abnormal_state.json"
diff --git a/airbyte-integrations/connectors/source-openweather/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-openweather/acceptance-test-docker.sh
new file mode 100755
index 000000000000..e4d8b1cef896
--- /dev/null
+++ b/airbyte-integrations/connectors/source-openweather/acceptance-test-docker.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2)
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+    -v /var/run/docker.sock:/var/run/docker.sock \
+    -v /tmp:/tmp \
+    -v $(pwd):/test_input \
+    airbyte/source-acceptance-test \
+    --acceptance-test-config /test_input
+
diff --git a/airbyte-integrations/connectors/source-openweather/bootstrap.md b/airbyte-integrations/connectors/source-openweather/bootstrap.md
new file mode 100644
index 000000000000..1ded59966a97
--- /dev/null
+++ b/airbyte-integrations/connectors/source-openweather/bootstrap.md
@@ -0,0 +1,16 @@
+# OpenWeather
+OpenWeather is an online service offering an API to retrieve historical, current and forecasted weather data across the globe.
+
+## One Call API
+The *One Call API* enables retrieval of multiple kinds of weather data for a location in a single call.
+I made this stream implementation a priority because it has a free plan that might be valuable for all data teams building models around weather data.
+The API returns current weather data along with other time resolutions (minutely, hourly, daily) and weather alerts.
+
+### Full refresh vs incremental stream implementation
+I did not implement a full refresh stream because One Call API calls are not idempotent: two subsequent calls with the same parameters might give different results. Moreover, it has no historical capabilities (there is a specific historical API for that) and only gives current weather conditions and forecasts. That's why I implemented an incremental stream without a feature to request past data.
+
+### Auth
+API calls are authenticated through an API key passed in a query string parameter (`appid`). API keys can be generated from OpenWeather's user account panel.
+
+### Rate limits
+The API does have some rate limiting logic but it's not very transparent to the user. There is no endpoint to check call consumption. It is stated that the free plan allows 60 calls/minute or 1,000,000 calls/month. If the limit is exceeded, the user account (not only the API key) gets blocked for an unknown duration.
\ No newline at end of file
diff --git a/airbyte-integrations/connectors/source-openweather/build.gradle b/airbyte-integrations/connectors/source-openweather/build.gradle
new file mode 100644
index 000000000000..846d5dbf3369
--- /dev/null
+++ b/airbyte-integrations/connectors/source-openweather/build.gradle
@@ -0,0 +1,13 @@
+plugins {
+    id 'airbyte-python'
+    id 'airbyte-docker'
+    id 'airbyte-source-acceptance-test'
+}
+
+airbytePython {
+    moduleDirectory 'source_openweather'
+}
+
+dependencies {
+    implementation files(project(':airbyte-integrations:bases:source-acceptance-test').airbyteDocker.outputs)
+}
diff --git a/airbyte-integrations/connectors/source-openweather/integration_tests/__init__.py b/airbyte-integrations/connectors/source-openweather/integration_tests/__init__.py
new file mode 100644
index 000000000000..46b7376756ec
--- /dev/null
+++ b/airbyte-integrations/connectors/source-openweather/integration_tests/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+# diff --git a/airbyte-integrations/connectors/source-openweather/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-openweather/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..b29bfc93799f --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "one_call": { + "dt": 2635338369 + } +} diff --git a/airbyte-integrations/connectors/source-openweather/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-openweather/integration_tests/acceptance.py new file mode 100644 index 000000000000..0347f2a0b143 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-openweather/integration_tests/catalog.json b/airbyte-integrations/connectors/source-openweather/integration_tests/catalog.json new file mode 100644 index 000000000000..c0e8035aba02 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/integration_tests/catalog.json @@ -0,0 +1,12 @@ +{ + "type": "CATALOG", + "catalog": { + "streams": [ + { + "name": "one_call", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + } + ] + } +} diff --git a/airbyte-integrations/connectors/source-openweather/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-openweather/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..fa34fa13e3c5 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/integration_tests/configured_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "one_call", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-openweather/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-openweather/integration_tests/invalid_config.json new file mode 100644 index 000000000000..2e8d790bcf91 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/integration_tests/invalid_config.json @@ -0,0 +1,5 @@ +{ + "lat": "12.1", + "lon": "-43.1", + "appid": "wrongkey" +} diff --git a/airbyte-integrations/connectors/source-openweather/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-openweather/integration_tests/sample_config.json new file mode 100644 index 000000000000..36a6498f7ccf --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/integration_tests/sample_config.json @@ -0,0 +1,5 @@ +{ + "appid": "my-api-key", + "lat": "-21.24239", + "lon": "55.71004" +} diff --git a/airbyte-integrations/connectors/source-openweather/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-openweather/integration_tests/sample_state.json new file mode 100644 index 000000000000..2b7ca2ebdb9e --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/integration_tests/sample_state.json @@ -0,0 +1,7 @@ +{ + "one_call": { + "current": { + "dt": 0 + } + } +} diff --git 
a/airbyte-integrations/connectors/source-openweather/main.py b/airbyte-integrations/connectors/source-openweather/main.py new file mode 100644 index 000000000000..3fe8ba844801 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_openweather import SourceOpenWeather + +if __name__ == "__main__": + source = SourceOpenWeather() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-openweather/requirements.txt b/airbyte-integrations/connectors/source-openweather/requirements.txt new file mode 100644 index 000000000000..0411042aa091 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-openweather/setup.py b/airbyte-integrations/connectors/source-openweather/setup.py new file mode 100644 index 000000000000..449b4501eb90 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_openweather", + description="Source implementation for Open Weather.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-openweather/source_openweather/__init__.py b/airbyte-integrations/connectors/source-openweather/source_openweather/__init__.py new file mode 100644 index 000000000000..36eca1bee2ec --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/source_openweather/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceOpenWeather + +__all__ = ["SourceOpenWeather"] diff --git a/airbyte-integrations/connectors/source-openweather/source_openweather/extra_validations.py b/airbyte-integrations/connectors/source-openweather/source_openweather/extra_validations.py new file mode 100644 index 000000000000..4c9cd2043600 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/source_openweather/extra_validations.py @@ -0,0 +1,33 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + +from typing import Any, Mapping + + +def check_lat(lat_value) -> float: + try: + lat_value = float(lat_value) + except (ValueError, TypeError): + raise Exception("Wrong value for lat, it must be a decimal number between -90 and 90") + if not -90 <= lat_value <= 90: + raise Exception("Wrong value for lat, it must be between -90 and 90") + return lat_value + + +def check_lon(lon_value) -> float: + try: + lon_value = float(lon_value) + except (ValueError, TypeError): + raise Exception("Wrong value for lon, it must be a decimal number between -180 and 180") + + if not -180 <= lon_value <= 180: + raise Exception("Wrong value for lon, it must be between -180 and 180") + return lon_value + + +def validate(config: Mapping[str, Any]) -> Mapping[str, Any]: + valid_config = {**config} + valid_config["lat"] = check_lat(valid_config["lat"]) + valid_config["lon"] = check_lon(valid_config["lon"]) + return valid_config diff --git a/airbyte-integrations/connectors/source-openweather/source_openweather/schemas/one_call.json b/airbyte-integrations/connectors/source-openweather/source_openweather/schemas/one_call.json new file mode 100644 index 000000000000..53acab5894ae --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/source_openweather/schemas/one_call.json @@ -0,0 +1,85 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "lat": { + "type": "number" + }, + "lon": { + "type": "number" + }, + "timezone": { + "type": "string" + }, + "timezone_offset": { + "type": "number" + }, + "current": { + "type": "object", + "properties": { + "dt": { + "type": "number" + }, + "sunrise": { + "type": "number" + }, + "sunset": { + "type": "number" + }, + "temp": { + "type": "number" + }, + "feels_like": { + "type": "number" + }, + "pressure": { + "type": "number" + }, + "humidity": { + "type": "number" + }, + "dew_point": { + "type": "number" + }, + "uvi": { + "type": "number" + }, + "clouds": { + "type": "number" + }, + "visibility": { + "type": "number" + }, + "wind_speed": { + "type": "number" + }, + "wind_deg": { + "type": "number" + }, + "weather": { + "type": "array" + }, + "rain": { + "type": "object", + "properties": { + "1h": { + "type": "number" + } + } + } + } + }, + "minutely": { + "type": "array" + }, + "hourly": { + "type": "array" + }, + "daily": { + "type": "array" + }, + "alerts": { + "type": "array" + } + } +} diff --git a/airbyte-integrations/connectors/source-openweather/source_openweather/source.py b/airbyte-integrations/connectors/source-openweather/source_openweather/source.py new file mode 100644 index 000000000000..8f2aa8f21a28 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/source_openweather/source.py @@ -0,0 +1,46 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + +from typing import Any, List, Mapping, Tuple + +import requests +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream + +from . 
import extra_validations, streams
+
+
+class SourceOpenWeather(AbstractSource):
+    def check_connection(self, logger, config) -> Tuple[bool, any]:
+        try:
+            valid_config = extra_validations.validate(config)
+            params = {
+                "appid": valid_config["appid"],
+                "lat": valid_config["lat"],
+                "lon": valid_config["lon"],
+                "lang": valid_config.get("lang"),
+                "units": valid_config.get("units"),
+            }
+            params = {k: v for k, v in params.items() if v is not None}
+            resp = requests.get(f"{streams.OneCall.url_base}onecall", params=params)
+            status = resp.status_code
+            if status == 200:
+                return True, None
+            else:
+                message = resp.json().get("message")
+                return False, message
+        except Exception as e:
+            return False, e
+
+    def streams(self, config: Mapping[str, Any]) -> List[Stream]:
+        valid_config = extra_validations.validate(config)
+        return [
+            streams.OneCall(
+                appid=valid_config["appid"],
+                lat=valid_config["lat"],
+                lon=valid_config["lon"],
+                lang=valid_config.get("lang"),
+                units=valid_config.get("units"),
+            )
+        ]
diff --git a/airbyte-integrations/connectors/source-openweather/source_openweather/spec.json b/airbyte-integrations/connectors/source-openweather/source_openweather/spec.json
new file mode 100644
index 000000000000..61ffbc8b6961
--- /dev/null
+++ b/airbyte-integrations/connectors/source-openweather/source_openweather/spec.json
@@ -0,0 +1,96 @@
+{
+  "documentationUrl": "https://docs.airbyte.io/integrations/sources/open-weather",
+  "connectionSpecification": {
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "title": "Open Weather Spec",
+    "type": "object",
+    "required": ["appid", "lat", "lon"],
+    "additionalProperties": false,
+    "properties": {
+      "lat": {
+        "title": "Latitude",
+        "type": "string",
+        "pattern": "^[-]?\\d{1,2}(\\.\\d+)?$",
+        "examples": ["45.7603", "-21.249107858038816"],
+        "description": "Latitude for which you want to get the weather conditions (min -90, max 90)."
+      },
+      "lon": {
+        "title": "Longitude",
+        "type": "string",
+        "pattern": "^[-]?\\d{1,3}(\\.\\d+)?$",
+        "examples": ["4.835659", "-70.39482074115321"],
+        "description": "Longitude for which you want to get the weather conditions (min -180, max 180)."
+      },
+      "appid": {
+        "title": "App ID",
+        "type": "string",
+        "description": "Your OpenWeather API Key. See here. The key is case sensitive.",
+        "airbyte_secret": true
+      },
+      "units": {
+        "title": "Units",
+        "type": "string",
+        "description": "Units of measurement. standard, metric and imperial units are available. If you do not use the units parameter, standard units will be applied by default.",
+        "enum": ["standard", "metric", "imperial"],
+        "examples": ["standard", "metric", "imperial"]
+      },
+      "lang": {
+        "title": "Language",
+        "type": "string",
+        "description": "You can use the lang parameter to get the output in your language. The contents of the description field will be translated. 
See here for the list of supported languages.", + "enum": [ + "af", + "al", + "ar", + "az", + "bg", + "ca", + "cz", + "da", + "de", + "el", + "en", + "eu", + "fa", + "fi", + "fr", + "gl", + "he", + "hi", + "hr", + "hu", + "id", + "it", + "ja", + "kr", + "la", + "lt", + "mk", + "no", + "nl", + "pl", + "pt", + "pt_br", + "ro", + "ru", + "sv", + "se", + "sk", + "sl", + "sp", + "es", + "sr", + "th", + "tr", + "ua", + "uk", + "vi", + "zh_cn", + "zh_tw", + "zu" + ], + "examples": ["en", "fr", "pt_br", "uk", "zh_cn", "zh_tw"] + } + } + } +} diff --git a/airbyte-integrations/connectors/source-openweather/source_openweather/streams.py b/airbyte-integrations/connectors/source-openweather/source_openweather/streams.py new file mode 100644 index 000000000000..d76d069b0790 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/source_openweather/streams.py @@ -0,0 +1,52 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + +from typing import Any, Iterable, Mapping, MutableMapping, Optional + +import requests +from airbyte_cdk.sources.streams.http import HttpStream + + +class OneCall(HttpStream): + + cursor_field = ["current", "dt"] + url_base = "https://api.openweathermap.org/data/2.5/" + primary_key = None + + def __init__(self, appid: str, lat: float, lon: float, lang: str = None, units: str = None): + super().__init__() + self.appid = appid + self.lat = lat + self.lon = lon + self.lang = lang + self.units = units + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "onecall" + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + return None + + def request_params(self, **kwargs) -> MutableMapping[str, Any]: + params = {"appid": self.appid, "lat": self.lat, "lon": self.lon, "lang": self.lang, "units": self.units} + params = {k: v for k, v in params.items() if v is not None} + return params + + def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], **kwargs) -> Iterable[Mapping]: + data = response.json() + if data["current"]["dt"] >= stream_state.get("dt", 0): + return [data] + else: + return [] + + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]): + current_stream_state = current_stream_state or {"dt": 0} + current_stream_state["dt"] = max(latest_record["current"]["dt"], current_stream_state["dt"]) + return current_stream_state + + def should_retry(self, response: requests.Response) -> bool: + # Do not retry in case of 429 because the account is blocked for an unknown duration. + return 500 <= response.status_code < 600 diff --git a/airbyte-integrations/connectors/source-openweather/unit_tests/__init__.py b/airbyte-integrations/connectors/source-openweather/unit_tests/__init__.py new file mode 100644 index 000000000000..46b7376756ec --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-openweather/unit_tests/test_extra_validations.py b/airbyte-integrations/connectors/source-openweather/unit_tests/test_extra_validations.py new file mode 100644 index 000000000000..b1b8265d6d07 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/unit_tests/test_extra_validations.py @@ -0,0 +1,64 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + +import pytest +from source_openweather import extra_validations + + +@pytest.mark.parametrize( + "lat_value, error_message", + [ + ("1", None), + (1, None), + ("-12.3", None), + ("-91", "Wrong value for lat, it must be between -90 and 90"), + ("91", "Wrong value for lat, it must be between -90 and 90"), + ("1,2", "Wrong value for lat, it must be a decimal number between -90 and 90"), + ("foo", "Wrong value for lat, it must be a decimal number between -90 and 90"), + (["not_string"], "Wrong value for lat, it must be a decimal number between -90 and 90"), + ], +) +def test_check_lat(lat_value, error_message): + if error_message: + with pytest.raises(Exception, match=error_message): + extra_validations.check_lat(lat_value) + else: + assert extra_validations.check_lat(lat_value) == float(lat_value) + + +@pytest.mark.parametrize( + "lon_value, error_message", + [ + ("1", None), + (1, None), + ("-92.3", None), + ("-191", "Wrong value for lon, it must be between -180 and 180"), + ("191", "Wrong value for lon, it must be between -180 and 180"), + ("1,2", "Wrong value for lon, it must be a decimal number between -180 and 180"), + ("foo", "Wrong value for lon, it must be a decimal number between -180 and 180"), + (["not_string"], "Wrong value for lon, it must be a decimal number between -180 and 180"), + ], +) +def test_check_lon(lon_value, error_message): + if error_message: + with pytest.raises(Exception, match=error_message): + extra_validations.check_lon(lon_value) + else: + assert extra_validations.check_lon(lon_value) == float(lon_value) + + +def test_validate(mocker): + check_lat_mock = mocker.patch("source_openweather.extra_validations.check_lat") + check_lat_mock.return_value = 1.0 + check_lon_mock = mocker.patch("source_openweather.extra_validations.check_lon") + check_lon_mock.return_value = 1.0 + + config_to_validate = {"appid": "foo", "lat": "1", "lon": "1"} + expected_valid_config = {"appid": "foo", "lat": 1.0, "lon": 1.0} + + valid_config = extra_validations.validate(config_to_validate) + assert isinstance(valid_config, dict) + assert valid_config == expected_valid_config + check_lat_mock.assert_called_with("1") + check_lon_mock.assert_called_with("1") diff --git a/airbyte-integrations/connectors/source-openweather/unit_tests/test_source.py b/airbyte-integrations/connectors/source-openweather/unit_tests/test_source.py new file mode 100644 index 000000000000..d1d5d999ddd5 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/unit_tests/test_source.py @@ -0,0 +1,58 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
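The tests above pin down the behavior of `extra_validations.check_lat`, `check_lon`, and `validate`, but the module itself is not part of this excerpt. A minimal sketch consistent with those assertions might look as follows; the `_check_coordinate` helper and the exact module layout are assumptions reconstructed from the tests, not the connector's actual code.

```python
# source_openweather/extra_validations.py -- hypothetical sketch, reverse
# engineered from the unit tests above; the real module may differ.
from typing import Any, Mapping


def _check_coordinate(value: Any, name: str, bound: float) -> float:
    # hypothetical helper: parse the value and enforce the [-bound, bound] range
    try:
        parsed = float(value)
    except (TypeError, ValueError):
        raise Exception(f"Wrong value for {name}, it must be a decimal number between -{bound:g} and {bound:g}")
    if not -bound <= parsed <= bound:
        raise Exception(f"Wrong value for {name}, it must be between -{bound:g} and {bound:g}")
    return parsed


def check_lat(lat: Any) -> float:
    # latitude must parse as a float and fall within [-90, 90]
    return _check_coordinate(lat, "lat", 90)


def check_lon(lon: Any) -> float:
    # longitude must parse as a float and fall within [-180, 180]
    return _check_coordinate(lon, "lon", 180)


def validate(config: Mapping[str, Any]) -> Mapping[str, Any]:
    # return a copy of the config with lat/lon normalized to floats
    valid_config = dict(config)
    valid_config["lat"] = check_lat(config["lat"])
    valid_config["lon"] = check_lon(config["lon"])
    return valid_config
```

Raising a bare `Exception` is unidiomatic, but it is the weakest contract the tests above allow, since they assert with `pytest.raises(Exception, match=...)`.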
+# + +from unittest.mock import MagicMock + +import pytest +from source_openweather.source import SourceOpenWeather +from source_openweather.streams import OneCall + + +@pytest.mark.parametrize( + "response_status", + [200, 400], +) +def test_check_connection(mocker, response_status): + validate_mock = mocker.patch("source_openweather.extra_validations.validate") + validate_mock.return_value = {"appid": "test_appid", "lat": 1.0, "lon": 1.0, "lang": None, "units": None} + requests_get_mock = mocker.patch("source_openweather.source.requests.get") + requests_get_mock.return_value.status_code = response_status + logger_mock = MagicMock() + config_mock = MagicMock() + + source = SourceOpenWeather() + if response_status == 200: + assert source.check_connection(logger_mock, config_mock) == (True, None) + else: + assert source.check_connection(logger_mock, config_mock) == (False, requests_get_mock.return_value.json.return_value.get("message")) + validate_mock.assert_called_with(config_mock) + requests_get_mock.assert_called_with( + "https://api.openweathermap.org/data/2.5/onecall", params={"appid": "test_appid", "lat": 1.0, "lon": 1.0} + ) + + +def test_check_connection_validation_error(mocker): + validate_mock = mocker.patch("source_openweather.extra_validations.validate") + error = Exception("expected message") + validate_mock.side_effect = error + logger_mock = MagicMock() + + source = SourceOpenWeather() + assert source.check_connection(logger_mock, {}) == (False, error) + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(OneCall, "__abstractmethods__", set()) + + +def test_streams(patch_base_class, mocker): + config_mock = MagicMock() + validate_mock = mocker.patch("source_openweather.source.extra_validations.validate") + source = SourceOpenWeather() + streams = source.streams(config_mock) + expected_streams_number = 1 + assert len(streams) == expected_streams_number + validate_mock.assert_called_with(config_mock) diff --git a/airbyte-integrations/connectors/source-openweather/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-openweather/unit_tests/test_streams.py new file mode 100644 index 000000000000..65b892a4e9b9 --- /dev/null +++ b/airbyte-integrations/connectors/source-openweather/unit_tests/test_streams.py @@ -0,0 +1,68 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from source_openweather.streams import OneCall + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(OneCall, "__abstractmethods__", set()) + + +@pytest.mark.parametrize( + ("stream", "expected_params"), + [ + (OneCall(appid="test_appid", lat=1.0, lon=1.0), {"appid": "test_appid", "lat": 1.0, "lon": 1.0}), + ( + OneCall(appid="test_appid", lat=1.0, lon=1.0, lang=None, units=None), + {"appid": "test_appid", "lat": 1.0, "lon": 1.0}, + ), + ( + OneCall(appid="test_appid", lat=1.0, lon=1.0, lang="fr", units="metric"), + {"appid": "test_appid", "lat": 1.0, "lon": 1.0, "lang": "fr", "units": "metric"}, + ), + ], +) +def test_request_params(stream, expected_params): + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + assert stream.request_params(**inputs) == expected_params + + +@pytest.mark.parametrize( + ("response_data", "stream_state", "expect_record"), + [ + ({"current": {"dt": 1}}, {}, True), + ({"current": {"dt": 2}}, {"dt": 1}, True), + ({"current": {"dt": 1}}, {"dt": 2}, False), + ], +) +def test_parse_response(patch_base_class, response_data, stream_state, expect_record): + stream = OneCall(appid="test_appid", lat=1.0, lon=1.0) + response_mock = MagicMock() + response_mock.json.return_value = response_data + if expect_record: + assert stream.parse_response(response=response_mock, stream_state=stream_state) == [response_mock.json.return_value] + else: + assert stream.parse_response(response=response_mock, stream_state=stream_state) == [] + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, False), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = OneCall(appid="test_appid", lat=1.0, lon=1.0) + assert stream.should_retry(response_mock) == should_retry diff --git a/airbyte-integrations/connectors/source-oracle-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-oracle-strict-encrypt/Dockerfile index 0b89e009ff95..88082b4b59c1 100644 --- a/airbyte-integrations/connectors/source-oracle-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-oracle-strict-encrypt/Dockerfile @@ -5,9 +5,7 @@ WORKDIR /airbyte ENV APPLICATION source-oracle-strict-encrypt ENV TZ UTC -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.1 LABEL io.airbyte.name=airbyte/source-oracle-strict-encrypt \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-paypal-transaction/Dockerfile b/airbyte-integrations/connectors/source-paypal-transaction/Dockerfile index ef14562d1873..d0f4b175fa16 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/Dockerfile +++ b/airbyte-integrations/connectors/source-paypal-transaction/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . 
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.2 +LABEL io.airbyte.version=0.1.3 LABEL io.airbyte.name=airbyte/source-paypal-transaction diff --git a/airbyte-integrations/connectors/source-paypal-transaction/README.md b/airbyte-integrations/connectors/source-paypal-transaction/README.md index 98e55ff6675a..d4fa2d383d78 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/README.md +++ b/airbyte-integrations/connectors/source-paypal-transaction/README.md @@ -99,7 +99,8 @@ Customize `acceptance-test-config.yml` file to configure tests. See [Source Acce If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. To run your integration tests with acceptance tests, from the connector root, run ``` -python -m pytest integration_tests -p integration_tests.acceptance +docker build . --no-cache -t airbyte/source-paypal-transaction:dev \ +&& python -m pytest -p source_acceptance_test.plugin ``` To run your integration tests with docker diff --git a/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/source.py b/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/source.py index 766be4255c8e..e05f656f75b4 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/source.py +++ b/airbyte-integrations/connectors/source-paypal-transaction/source_paypal_transaction/source.py @@ -2,6 +2,7 @@ # Copyright (c) 2021 Airbyte, Inc., all rights reserved. # +import json import logging import time from abc import ABC @@ -16,6 +17,30 @@ from dateutil.parser import isoparse +class PaypalHttpException(Exception): + """HTTPError Exception with detailed info""" + + def __init__(self, error: requests.exceptions.HTTPError): + self.error = error + + def __str__(self): + message = repr(self.error) + + if self.error.response.content: + content = self.error.response.content.decode() + try: + details = json.loads(content) + except json.decoder.JSONDecodeError: + details = content + + message = f"{message} Details: {details}" + + return message + + def __repr__(self): + return self.__str__() + + def get_endpoint(is_sandbox: bool = False) -> str: if is_sandbox: endpoint = "https://api-m.sandbox.paypal.com" @@ -227,6 +252,12 @@ def stream_slices( return slices + def _send_request(self, request: requests.PreparedRequest, request_kwargs: Mapping[str, Any]) -> requests.Response: + try: + return super()._send_request(request, request_kwargs) + except requests.exceptions.HTTPError as http_error: + raise PaypalHttpException(http_error) + class Transactions(PaypalTransactionStream): """List Paypal Transactions on a specific date range @@ -351,11 +382,28 @@ def check_connection(self, logger, config) -> Tuple[bool, any]: # Try to initiate a stream and validate input date params try: + # validate input date ranges Transactions(authenticator=authenticator, **config).validate_input_dates() - except Exception as e: - return False, e - return True, None + # validate if Paypal API is able to extract data for given start_data + start_date = isoparse(config["start_date"]) + end_date = start_date + timedelta(days=1) + stream_slice = { + "start_date": start_date.isoformat(), + "end_date": end_date.isoformat(), + } + records = Transactions(authenticator=authenticator, **config).read_records( + sync_mode=None, + 
stream_slice=stream_slice
+            )
+            # Try to read one value from records iterator
+            next(records, None)
+            return True, None
+        except Exception as e:
+            if "Data for the given start date is not available" in repr(e):
+                return False, f"Data for the given start date ({config['start_date']}) is not available, please use a more recent start date"
+            else:
+                return False, e
 
     def streams(self, config: Mapping[str, Any]) -> List[Stream]:
         """
diff --git a/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile
index e107453dd914..0c928a487654 100644
--- a/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile
+++ b/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile
@@ -4,9 +4,7 @@ WORKDIR /airbyte
 
 ENV APPLICATION source-postgres-strict-encrypt
 
-COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar
-
-RUN tar xf ${APPLICATION}.tar --strip-components=1
+ADD build/distributions/${APPLICATION}*.tar /airbyte
 
 LABEL io.airbyte.version=0.1.5
 LABEL io.airbyte.name=airbyte/source-postgres-strict-encrypt
diff --git a/airbyte-integrations/connectors/source-postgres/Dockerfile b/airbyte-integrations/connectors/source-postgres/Dockerfile
index 921da976c8bc..991ac2b9f5d5 100644
--- a/airbyte-integrations/connectors/source-postgres/Dockerfile
+++ b/airbyte-integrations/connectors/source-postgres/Dockerfile
@@ -4,9 +4,7 @@ WORKDIR /airbyte
 
 ENV APPLICATION source-postgres
 
-COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar
-
-RUN tar xf ${APPLICATION}.tar --strip-components=1
+ADD build/distributions/${APPLICATION}*.tar /airbyte
 
 LABEL io.airbyte.version=0.3.17
 LABEL io.airbyte.name=airbyte/source-postgres
diff --git a/airbyte-integrations/connectors/source-postgres/README.md b/airbyte-integrations/connectors/source-postgres/README.md
index 87d2d41f7f03..6ba53763dd69 100644
--- a/airbyte-integrations/connectors/source-postgres/README.md
+++ b/airbyte-integrations/connectors/source-postgres/README.md
@@ -2,6 +2,16 @@
 
 ## Performance Test
 
+### Running performance tests with CPU and Memory limits for the container
+
+To run performance tests with CPU and memory limits, add the parameters
+**cpulimit=cpulimit/YOUR_CPU_LIMIT** and **memorylimit=memorylimit/YOUR_MEMORY_LIMIT** to the performance test start command.
+**YOUR_MEMORY_LIMIT** - the RAM limit. Specify the unit (MB or GB) at the end of the value. The minimum is 6MB.
+**YOUR_CPU_LIMIT** - the CPU limit. The minimum is 2.
+Both parameters are optional and can be used independently of each other.
+For example, if you pass only **memorylimit=memorylimit/2GB**, only the memory limit is applied to the container and the CPU is not limited.
+If you omit both parameters, the performance tests run without memory or CPU limits.
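Stepping back to the PayPal `check_connection` change earlier in this patch: it matches the error text with `"..." in repr(e)`, which works because `PaypalHttpException.__repr__` folds the decoded response body into the message. The snippet below demonstrates that mechanism in isolation; the exception class body is copied from the diff, while the fabricated 400 response (and the test-only trick of writing to the private `_content` attribute) is purely illustrative.

```python
# Demonstration of why `"..." in repr(e)` sees the API's error details.
import json

import requests


class PaypalHttpException(Exception):
    """HTTPError Exception with detailed info (as defined in the diff above)."""

    def __init__(self, error: requests.exceptions.HTTPError):
        self.error = error

    def __str__(self):
        message = repr(self.error)
        if self.error.response.content:
            content = self.error.response.content.decode()
            try:
                details = json.loads(content)
            except json.decoder.JSONDecodeError:
                details = content
            message = f"{message} Details: {details}"
        return message

    def __repr__(self):
        return self.__str__()


# Fabricated response for illustration only; setting _content directly is a
# test shortcut, not something the connector does.
response = requests.Response()
response.status_code = 400
response._content = json.dumps(
    {"message": "Data for the given start date is not available."}
).encode()

try:
    response.raise_for_status()
except requests.exceptions.HTTPError as http_error:
    exc = PaypalHttpException(http_error)
    # The decoded body is part of repr(), so substring matching works:
    assert "Data for the given start date is not available" in repr(exc)
```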
+ ### Use Postgres script to populate the benchmark database In order to create a database with a certain number of tables, and a certain number of records in each of them, diff --git a/airbyte-integrations/connectors/source-redshift/Dockerfile b/airbyte-integrations/connectors/source-redshift/Dockerfile index 90fec9ee4931..b3f31198cf07 100644 --- a/airbyte-integrations/connectors/source-redshift/Dockerfile +++ b/airbyte-integrations/connectors/source-redshift/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-redshift -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.3.4 LABEL io.airbyte.name=airbyte/source-redshift diff --git a/airbyte-integrations/connectors/source-relational-db/Dockerfile b/airbyte-integrations/connectors/source-relational-db/Dockerfile index 0fa1991c49e5..ccd81c20d0dc 100644 --- a/airbyte-integrations/connectors/source-relational-db/Dockerfile +++ b/airbyte-integrations/connectors/source-relational-db/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-relational-db -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.3.0 LABEL io.airbyte.name=airbyte/source-relational-db diff --git a/airbyte-integrations/connectors/source-scaffold-java-jdbc/Dockerfile b/airbyte-integrations/connectors/source-scaffold-java-jdbc/Dockerfile index 76937a24e3cc..e25431ab66a8 100644 --- a/airbyte-integrations/connectors/source-scaffold-java-jdbc/Dockerfile +++ b/airbyte-integrations/connectors/source-scaffold-java-jdbc/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-scaffold-java-jdbc -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte # Airbyte's build system uses these labels to know what to name and tag the docker images produced by this Dockerfile. 
LABEL io.airbyte.version=0.1.0 diff --git a/airbyte-integrations/connectors/source-shopify/Dockerfile b/airbyte-integrations/connectors/source-shopify/Dockerfile index 50a92a81ee45..48a52243bbf4 100644 --- a/airbyte-integrations/connectors/source-shopify/Dockerfile +++ b/airbyte-integrations/connectors/source-shopify/Dockerfile @@ -28,5 +28,5 @@ COPY source_shopify ./source_shopify ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.25 +LABEL io.airbyte.version=0.1.26 LABEL io.airbyte.name=airbyte/source-shopify diff --git a/airbyte-integrations/connectors/source-shopify/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-shopify/integration_tests/abnormal_state.json index 3274886656c3..e4a7d9adcbb2 100644 --- a/airbyte-integrations/connectors/source-shopify/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-shopify/integration_tests/abnormal_state.json @@ -23,10 +23,10 @@ "custom_collections": { "updated_at": "2024-07-19T07:01:37-07:00" }, - "orders_refunds": { + "order_refunds": { "created_at": "2024-07-19T06:41:47-07:00" }, - "orders_risks": { + "order_risks": { "id": 9991307599038 }, "transactions": { diff --git a/airbyte-integrations/connectors/source-shopify/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-shopify/integration_tests/configured_catalog.json index 8631f030275b..c9cc07b41ad1 100644 --- a/airbyte-integrations/connectors/source-shopify/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-shopify/integration_tests/configured_catalog.json @@ -98,7 +98,7 @@ }, { "stream": { - "name": "orders_refunds", + "name": "order_refunds", "json_schema": {}, "supported_sync_modes": ["incremental", "full_refresh"], "source_defined_cursor": true, @@ -110,7 +110,7 @@ }, { "stream": { - "name": "orders_risks", + "name": "order_risks", "json_schema": {}, "supported_sync_modes": ["incremental", "full_refresh"], "source_defined_cursor": true, diff --git a/airbyte-integrations/connectors/source-shopify/integration_tests/state.json b/airbyte-integrations/connectors/source-shopify/integration_tests/state.json index 059da172e51f..5c9988b0f283 100644 --- a/airbyte-integrations/connectors/source-shopify/integration_tests/state.json +++ b/airbyte-integrations/connectors/source-shopify/integration_tests/state.json @@ -23,7 +23,7 @@ "custom_collections": { "updated_at": "2021-08-18T02:39:34-07:00" }, - "orders_refunds": { + "order_refunds": { "created_at": "2021-09-09T02:57:43-07:00" }, "orders_risks": { diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/abandoned_checkouts.json b/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/abandoned_checkouts.json index 8b07ab63fbd7..6e2c0e6cfc52 100644 --- a/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/abandoned_checkouts.json +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/abandoned_checkouts.json @@ -93,8 +93,7 @@ "type": ["null", "string"] }, "amount": { - "type": ["null", "number"], - "multipleOf": 1e-10 + "type": ["null", "string"] }, "code": { "type": ["null", "string"] @@ -212,357 +211,38 @@ "line_items": { "items": { "properties": { - "applied_discounts": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "total_discount_set": { - "type": ["null", "object"], - "properties": { - "shop_money": { 
- "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - }, - "presentment_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - } - } - }, - "pre_tax_price_set": { - "properties": { - "shop_money": { - "properties": { - "currency_code": { - "type": ["null", "string"] - }, - "amount": { - "type": ["null", "number"] - } - }, - "type": ["null", "object"] - }, - "presentment_money": { - "properties": { - "currency_code": { - "type": ["null", "string"] - }, - "amount": { - "type": ["null", "number"] - } - }, - "type": ["null", "object"] - } - }, - "type": ["null", "object"] - }, - "price_set": { - "type": ["null", "object"], - "properties": { - "shop_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - }, - "presentment_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - } - } + "sku": { + "type": ["null", "string"] }, "grams": { - "type": ["null", "integer"] - }, - "compare_at_price": { "type": ["null", "number"] }, - "destination_location_id": { - "type": ["null", "integer"] + "price": { + "type": ["null", "string"] }, - "key": { + "title": { "type": ["null", "string"] }, - "line_price": { - "type": ["null", "number"] + "vendor": { + "type": ["null", "string"] }, - "origin_location_id": { + "quantity": { "type": ["null", "integer"] }, - "applied_discount": { + "product_id": { "type": ["null", "integer"] }, - "fulfillable_quantity": { + "variant_id": { "type": ["null", "integer"] }, "variant_title": { "type": ["null", "string"] }, - "properties": { - "anyOf": [ - { - "items": { - "properties": { - "name": { - "type": ["null", "string"] - }, - "value": { - "type": ["null", "string"] - } - }, - "type": ["null", "object"] - }, - "type": ["null", "array"] - }, - { - "type": ["null", "object"], - "additionalProperties": true - } - ] - }, - "tax_code": { - "type": ["null", "string"] - }, - "discount_allocations": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "discount_application_index": { - "type": ["null", "integer"] - }, - "amount_set": { - "type": ["null", "object"], - "properties": { - "shop_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "string"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - }, - "presentment_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "string"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - } - } - }, - "amount": { - "type": ["null", "number"] - } - } - } - }, - "admin_graphql_api_id": { - "type": ["null", "string"] - }, - "pre_tax_price": { - "type": ["null", "number"] - }, - "sku": { - "type": ["null", "string"] - }, - "product_exists": { - "type": ["null", "boolean"] - }, - "total_discount": { - "type": ["null", "number"], - "multipleOf": 1e-10 - }, - "name": { - "type": ["null", "string"] - }, - "fulfillment_status": { - "type": ["null", "string"] - }, - "gift_card": { - "type": ["null", "boolean"] - }, - "id": { - "type": ["null", "integer", "string"] - }, - "taxable": { - "type": ["null", "boolean"] - }, - "vendor": { - "type": ["null", "string"] - }, - "tax_lines": 
{ - "items": { - "properties": { - "price_set": { - "type": ["null", "object"], - "properties": { - "shop_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - }, - "presentment_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - } - } - }, - "price": { - "type": ["null", "number"] - }, - "title": { - "type": ["null", "string"] - }, - "rate": { - "type": ["null", "number"] - }, - "compare_at": { - "type": ["null", "number"] - }, - "position": { - "type": ["null", "integer"] - }, - "source": { - "type": ["null", "string"] - }, - "zone": { - "type": ["null", "string"] - } - }, - "type": ["null", "object"] - }, - "type": ["null", "array"] - }, - "origin_location": { - "properties": { - "country_code": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "address1": { - "type": ["null", "string"] - }, - "city": { - "type": ["null", "string"] - }, - "id": { - "type": ["null", "integer"] - }, - "address2": { - "type": ["null", "string"] - }, - "province_code": { - "type": ["null", "string"] - }, - "zip": { - "type": ["null", "string"] - } - }, - "type": ["null", "object"] - }, - "price": { - "type": ["null", "number"] - }, "requires_shipping": { "type": ["null", "boolean"] }, "fulfillment_service": { "type": ["null", "string"] - }, - "variant_inventory_management": { - "type": ["null", "string"] - }, - "title": { - "type": ["null", "string"] - }, - "destination_location": { - "properties": { - "country_code": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "address1": { - "type": ["null", "string"] - }, - "city": { - "type": ["null", "string"] - }, - "id": { - "type": ["null", "integer"] - }, - "address2": { - "type": ["null", "string"] - }, - "province_code": { - "type": ["null", "string"] - }, - "zip": { - "type": ["null", "string"] - } - }, - "type": ["null", "object"] - }, - "quantity": { - "type": ["null", "integer"] - }, - "product_id": { - "type": ["null", "integer"] - }, - "variant_id": { - "type": ["null", "integer"] } }, "type": ["null", "object"] diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/draft_orders.json b/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/draft_orders.json index 83aaa02386cb..4a6c86820999 100644 --- a/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/draft_orders.json +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/draft_orders.json @@ -4,653 +4,87 @@ "id": { "type": ["null", "integer"] }, - "order_id": { - "type": ["null", "integer"] - }, - "name": { - "type": ["null", "string"] - }, - "customer": { - "type": ["null", "object"], - "properties": { - "id": { - "type": ["null", "integer"] - }, - "email": { - "type": ["null", "string"] - }, - "accepts_marketing": { - "type": ["null", "boolean"] - }, - "created_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "updated_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "first_name": { - "type": ["null", "string"] - }, - "last_name": { - "type": ["null", "string"] - }, - "orders_count": { - "type": ["null", "integer"] - }, - "state": { - "type": ["null", "string"] - }, - "total_spent": { - "type": ["null", "number"] - }, - "last_order_id": { - "type": ["null", "integer"] - }, - "note": { - 
"type": ["null", "string"] - }, - "verified_email": { - "type": ["null", "boolean"] - }, - "multipass_identifier": { - "type": ["null", "string"] - }, - "phone": { - "type": ["null", "string"] - }, - "tags": { - "type": ["null", "string"] - }, - "last_order_name": { - "type": ["null", "string"] - }, - "currency": { - "type": ["null", "string"] - }, - "accepts_marketing_updated_at": { - "type": ["null", "string"] - }, - "marketing_opt_in_level": { - "type": ["null", "string"] - }, - "admin_graphql_api_id": { - "type": ["null", "string"] - }, - "default_address": { - "type": ["null", "object"], - "properties": { - "id": { - "type": ["null", "integer"] - }, - "customer_id": { - "type": ["null", "integer"] - }, - "first_name": { - "type": ["null", "string"] - }, - "last_name": { - "type": ["null", "string"] - }, - "company": { - "type": ["null", "string"] - }, - "address1": { - "type": ["null", "string"] - }, - "address2": { - "type": ["null", "string"] - }, - "city": { - "type": ["null", "string"] - }, - "province": { - "type": ["null", "string"] - }, - "country": { - "type": ["null", "string"] - }, - "zip": { - "type": ["null", "string"] - }, - "phone": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "province_code": { - "type": ["null", "string"] - }, - "country_code": { - "type": ["null", "string"] - }, - "country_name": { - "type": ["null", "string"] - }, - "default": { - "type": ["null", "boolean"] - } - } - } - } - }, - "shipping_address": { - "properties": { - "phone": { - "type": ["null", "string"] - }, - "country": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "address1": { - "type": ["null", "string"] - }, - "longitude": { - "type": ["null", "number"] - }, - "address2": { - "type": ["null", "string"] - }, - "last_name": { - "type": ["null", "string"] - }, - "first_name": { - "type": ["null", "string"] - }, - "province": { - "type": ["null", "string"] - }, - "city": { - "type": ["null", "string"] - }, - "company": { - "type": ["null", "string"] - }, - "latitude": { - "type": ["null", "number"] - }, - "country_code": { - "type": ["null", "string"] - }, - "province_code": { - "type": ["null", "string"] - }, - "zip": { - "type": ["null", "string"] - } - }, - "type": ["null", "object"] - }, - "billing_address": { - "properties": { - "phone": { - "type": ["null", "string"] - }, - "country": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "address1": { - "type": ["null", "string"] - }, - "longitude": { - "type": ["null", "number"] - }, - "address2": { - "type": ["null", "string"] - }, - "last_name": { - "type": ["null", "string"] - }, - "first_name": { - "type": ["null", "string"] - }, - "province": { - "type": ["null", "string"] - }, - "city": { - "type": ["null", "string"] - }, - "company": { - "type": ["null", "string"] - }, - "latitude": { - "type": ["null", "number"] - }, - "country_code": { - "type": ["null", "string"] - }, - "province_code": { - "type": ["null", "string"] - }, - "zip": { - "type": ["null", "string"] - } - }, - "type": ["null", "object"] - }, - "note": { - "type": ["null", "string"] - }, - "note_attributes": { - "items": { - "properties": { - "name": { - "type": ["null", "string"] - }, - "value": { - "type": ["null", "string"] - } - }, - "type": ["null", "object"] - }, - "type": ["null", "array"] - }, - "email": { - "type": ["null", "string"] - }, - "currency": { - "type": ["null", "string"] - }, - "invoice_sent_at": { - "type": ["null", "string"], - 
"format": "date-time" - }, - "invoice_url": { - "type": ["null", "string"] - }, - "line_items": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "id": { - "type": ["null", "integer"] - }, - "admin_graphql_api_id": { - "type": ["null", "string"] - }, - "destination_location": { - "type": ["null", "object"], - "properties": { - "id": { - "type": ["null", "integer"] - }, - "country_code": { - "type": ["null", "string"] - }, - "province_code": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "address1": { - "type": ["null", "string"] - }, - "address2": { - "type": ["null", "string"] - }, - "city": { - "type": ["null", "string"] - }, - "zip": { - "type": ["null", "string"] - } - } - }, - "fulfillable_quantity": { - "type": ["null", "integer"] - }, - "fulfillment_service": { - "type": ["null", "string"] - }, - "fulfillment_status": { - "type": ["null", "string"] - }, - "gift_card": { - "type": ["null", "boolean"] - }, - "grams": { - "type": ["null", "integer"] - }, - "name": { - "type": ["null", "string"] - }, - "origin_location": { - "type": ["null", "object"], - "properties": { - "id": { - "type": ["null", "integer"] - }, - "country_code": { - "type": ["null", "string"] - }, - "province_code": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "address1": { - "type": ["null", "string"] - }, - "address2": { - "type": ["null", "string"] - }, - "city": { - "type": ["null", "string"] - }, - "zip": { - "type": ["null", "string"] - } - } - }, - "price": { - "type": ["null", "number"] - }, - "price_set": { - "type": ["null", "object"], - "properties": { - "shop_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - }, - "presentment_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - } - } - }, - "product_exists": { - "type": ["null", "boolean"] - }, - "product_id": { - "type": ["null", "integer"] - }, - "properties": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "name": { - "type": ["null", "string"] - }, - "value": { - "type": ["null", "string"] - } - } - }, - "quantity": { - "type": ["null", "integer"] - }, - "requires_shipping": { - "type": ["null", "boolean"] - }, - "sku": { - "type": ["null", "string"] - }, - "taxable": { - "type": ["null", "boolean"] - }, - "title": { - "type": ["null", "string"] - }, - "total_discount": { - "type": ["null", "number"] - }, - "total_discount_set": { - "type": ["null", "object"], - "properties": { - "shop_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - }, - "presentment_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - } - } - }, - "variant_id": { - "type": ["null", "integer"] - }, - "variant_inventory_management": { - "type": ["null", "string"] - }, - "variant_title": { - "type": ["null", "string"] - }, - "vendor": { - "type": ["null", "string"] - }, - "tax_lines": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "price": { - "type": ["null", "number"] - }, - "price_set": { - "type": ["null", "object"], - 
"properties": { - "shop_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - }, - "presentment_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - } - } - }, - "rate": { - "type": ["null", "number"] - }, - "title": { - "type": ["null", "string"] - } - } - } - }, - "duties": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "id": { - "type": ["null", "string"] - }, - "harmonized_system_code": { - "type": ["null", "string"] - }, - "country_code_of_origin": { - "type": ["null", "string"] - }, - "shop_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "string"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - }, - "presentment_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "string"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - }, - "tax_lines": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "title": { - "type": ["null", "string"] - }, - "price": { - "type": ["null", "string"] - }, - "rate": { - "type": ["null", "number"] - }, - "price_set": { - "type": ["null", "object"], - "properties": { - "shop_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "string"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - }, - "presentment_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "string"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - } - } - }, - "channel_liable": { - "type": ["null", "boolean"] - } - } - } - }, - "admin_graphql_api_id": { - "type": ["null", "string"] - } - } - } - }, - "discount_allocations": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "id": { - "type": ["null", "string"] - }, - "amount": { - "type": ["null", "string"] - }, - "description": { - "type": ["null", "string"] - }, - "created_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "discount_application_index": { - "type": ["null", "number"] - }, - "amount_set": { - "type": ["null", "object"], - "properties": { - "shop_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "string"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - }, - "presentment_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "string"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - } - } - }, - "application_type": { - "type": ["null", "string"] - } - } - } - } - } - } - }, - "shipping_line": { + "note": { + "type": ["null", "string"] + }, + "email": { + "type": ["null", "string"] + }, + "taxes_included": { + "type": ["null", "boolean"] + }, + "currency": { + "type": ["null", "string"] + }, + "invoice_sent_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "tax_exempt": { + "type": ["null", "boolean"] + }, + "completed_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "name": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] 
+ }, + "line_items": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], "properties": { + "id": { + "type": ["null", "integer"] + }, + "variant_id": { + "type": ["null", "integer"] + }, + "product_id": { + "type": ["null", "integer"] + }, + "title": { + "type": ["null", "string"] + }, + "variant_title": { + "type": ["null", "string"] + }, + "sku": { + "type": ["null", "string"] + }, + "vendor": { + "type": ["null", "string"] + }, + "quantity": { + "type": ["null", "integer"] + }, + "requires_shipping": { + "type": ["null", "boolean"] + }, + "taxable": { + "type": ["null", "boolean"] + }, + "gift_card": { + "type": ["null", "boolean"] + }, + "fulfillment_service": { + "type": ["null", "string"] + }, + "grams": { + "type": ["null", "number"] + }, "tax_lines": { "type": ["null", "array"], "items": { @@ -695,35 +129,211 @@ } } }, - "phone": { - "type": ["null", "string"] + "applied_discount": { + "type": ["null", "object"], + "properties": { + "description": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + }, + "title": { + "type": ["null", "string"] + }, + "amount": { + "type": ["null", "string"] + }, + "value_type": { + "type": ["null", "string"] + } + } + }, + "name": { + "type": ["null", "string"] + }, + "properties": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "name": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + } + } + } + }, + "custom": { + "type": ["null", "boolean"] + }, + "price": { + "type": ["null", "number"] + }, + "admin_graphql_api_id": { + "type": ["null", "string"] + } + } + } + }, + "shipping_address": { + "properties": { + "phone": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "address1": { + "type": ["null", "string"] + }, + "longitude": { + "type": ["null", "number"] + }, + "address2": { + "type": ["null", "string"] + }, + "last_name": { + "type": ["null", "string"] + }, + "first_name": { + "type": ["null", "string"] + }, + "province": { + "type": ["null", "string"] + }, + "city": { + "type": ["null", "string"] + }, + "company": { + "type": ["null", "string"] + }, + "latitude": { + "type": ["null", "number"] + }, + "country_code": { + "type": ["null", "string"] + }, + "province_code": { + "type": ["null", "string"] + }, + "zip": { + "type": ["null", "string"] + } + }, + "type": ["null", "object"] + }, + "billing_address": { + "properties": { + "phone": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "address1": { + "type": ["null", "string"] + }, + "longitude": { + "type": ["null", "number"] + }, + "address2": { + "type": ["null", "string"] + }, + "last_name": { + "type": ["null", "string"] + }, + "first_name": { + "type": ["null", "string"] + }, + "province": { + "type": ["null", "string"] + }, + "city": { + "type": ["null", "string"] + }, + "company": { + "type": ["null", "string"] + }, + "latitude": { + "type": ["null", "number"] + }, + "country_code": { + "type": ["null", "string"] + }, + "province_code": { + "type": ["null", "string"] + }, + "zip": { + "type": ["null", "string"] + } + }, + "type": ["null", "object"] + }, + "invoice_url": { + "type": ["null", "string"] + }, + "applied_discount": { + "type": ["null", "object"], + "properties": { + "description": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + }, + "title": { + 
"type": ["null", "string"] + }, + "amount": { + "type": ["null", "string"] + }, + "value_type": { + "type": ["null", "string"] + } + } + }, + "order_id": { + "type": ["null", "integer"] + }, + "shipping_line": { + "properties": { + "price": { + "type": ["null", "number"] + }, + "title": { + "type": ["null", "string"] + }, + "custom": { + "type": ["null", "boolean"] + }, + "handle": { + "type": ["null", "string"] + } + }, + "type": ["null", "object"] + }, + "tax_lines": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "price": { + "type": ["null", "number"] + }, + "rate": { + "type": ["null", "number"] }, - "discounted_price_set": { - "type": ["null", "object"], - "properties": { - "shop_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - }, - "presentment_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - } - } + "title": { + "type": ["null", "string"] }, "price_set": { "type": ["null", "object"], @@ -751,154 +361,173 @@ } } } - }, - "price": { - "type": ["null", "number"] - }, - "title": { - "type": ["null", "string"] - }, - "discount_allocations": { - "items": { - "properties": { - "discount_application_index": { - "type": ["null", "integer"] - }, - "amount": { - "type": ["null", "number"] - } - }, - "type": ["null", "object"] - }, - "type": ["null", "array"] - }, - "delivery_category": { - "type": ["null", "string"] - }, - "discounted_price": { - "type": ["null", "number"] - }, - "code": { - "type": ["null", "string"] - }, - "requested_fulfillment_service_id": { - "type": ["null", "string"] - }, - "carrier_identifier": { + } + } + } + }, + "tags": { + "type": ["null", "string"] + }, + "note_attributes": { + "items": { + "properties": { + "name": { "type": ["null", "string"] }, - "id": { - "type": ["null", "integer"] - }, - "source": { + "value": { "type": ["null", "string"] } }, "type": ["null", "object"] }, - "tags": { - "type": ["null", "string"] - }, - "tax_exemptions": { - "type": ["null", "boolean"] - }, - "tax_lines": { - "type": ["null", "array"], - "items": { + "type": ["null", "array"] + }, + "total_price": { + "type": ["null", "string"] + }, + "subtotal_price": { + "type": ["null", "string"] + }, + "total_tax": { + "type": ["null", "string"] + }, + "admin_graphql_api_id": { + "type": ["null", "string"] + }, + "customer": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "integer"] + }, + "email": { + "type": ["null", "string"] + }, + "accepts_marketing": { + "type": ["null", "boolean"] + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "first_name": { + "type": ["null", "string"] + }, + "last_name": { + "type": ["null", "string"] + }, + "orders_count": { + "type": ["null", "integer"] + }, + "state": { + "type": ["null", "string"] + }, + "total_spent": { + "type": ["null", "number"] + }, + "last_order_id": { + "type": ["null", "integer"] + }, + "note": { + "type": ["null", "string"] + }, + "verified_email": { + "type": ["null", "boolean"] + }, + "multipass_identifier": { + "type": ["null", "string"] + }, + "tax_exempt": { + "type": ["null", "boolean"] + }, + "phone": { + "type": ["null", "string"] + }, + "tags": { + "type": ["null", "string"] + }, + "last_order_name": { + 
"type": ["null", "string"] + }, + "currency": { + "type": ["null", "string"] + }, + "accepts_marketing_updated_at": { + "type": ["null", "string"] + }, + "marketing_opt_in_level": { + "type": ["null", "string"] + }, + "tax_exemptions": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "admin_graphql_api_id": { + "type": ["null", "string"] + }, + "default_address": { "type": ["null", "object"], "properties": { - "price": { - "type": ["null", "number"] + "id": { + "type": ["null", "integer"] }, - "rate": { - "type": ["null", "number"] + "customer_id": { + "type": ["null", "integer"] }, - "title": { + "first_name": { "type": ["null", "string"] }, - "price_set": { - "type": ["null", "object"], - "properties": { - "shop_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - }, - "presentment_money": { - "type": ["null", "object"], - "properties": { - "amount": { - "type": ["null", "number"] - }, - "currency_code": { - "type": ["null", "string"] - } - } - } - } + "last_name": { + "type": ["null", "string"] + }, + "company": { + "type": ["null", "string"] + }, + "address1": { + "type": ["null", "string"] + }, + "address2": { + "type": ["null", "string"] + }, + "city": { + "type": ["null", "string"] + }, + "province": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "string"] + }, + "zip": { + "type": ["null", "string"] + }, + "phone": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "province_code": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "country_name": { + "type": ["null", "string"] + }, + "default": { + "type": ["null", "boolean"] } } } - }, - "applied_discount": { - "type": ["null", "object"], - "properties": { - "description": { - "type": ["null", "string"] - }, - "value": { - "type": ["null", "string"] - }, - "title": { - "type": ["null", "string"] - }, - "amount": { - "type": ["null", "string"] - }, - "value_type": { - "type": ["null", "string"] - } - } - }, - "taxes_included": { - "type": ["null", "boolean"] - }, - "total_price": { - "type": ["null", "number"] - }, - "completed_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "created_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "updated_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "status": { - "type": ["null", "string"] } - }, - "created_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "updated_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "completed_at": { - "type": ["null", "string"], - "format": "date-time" } } } diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/metafields.json b/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/metafields.json index 0c0c9b9c02ff..4277aeb4351a 100644 --- a/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/metafields.json +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/metafields.json @@ -29,7 +29,7 @@ "type": ["null", "string"] }, "value": { - "type": ["null", "number", "string", "integer", "boolean"] + "type": ["null", "string"] }, "updated_at": { "type": ["null", "string"], diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/orders_refunds.json 
b/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/order_refunds.json similarity index 86% rename from airbyte-integrations/connectors/source-shopify/source_shopify/schemas/orders_refunds.json rename to airbyte-integrations/connectors/source-shopify/source_shopify/schemas/order_refunds.json index e852ef31d321..608a420a4691 100644 --- a/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/orders_refunds.json +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/order_refunds.json @@ -388,6 +388,79 @@ }, "type": ["null", "object"] } + }, + "transactions": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "integer"] + }, + "admin_graphql_api_id": { + "type": ["null", "string"] + }, + "amount": { + "type": ["null", "string"] + }, + "authorization": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "currency": { + "type": ["null", "string"] + }, + "device_id": { + "type": ["null", "integer"] + }, + "error_code": { + "type": ["null", "string"] + }, + "gateway": { + "type": ["null", "string"] + }, + "kind": { + "type": ["null", "string"] + }, + "location_id": { + "type": ["null", "integer"] + }, + "message": { + "type": ["null", "string"] + }, + "order_id": { + "type": ["null", "integer"] + }, + "parent_id": { + "type": ["null", "integer"] + }, + "processed_at": { + "type": ["null", "string"] + }, + "receipt": { + "type": ["null", "object"], + "properties": { + "paid_amount": { + "type": ["null", "string"] + } + } + }, + "source_name": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "test": { + "type": ["null", "boolean"] + }, + "user_id": { + "type": ["null", "integer"] + } + } + } } } } diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/orders_risks.json b/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/order_risks.json similarity index 100% rename from airbyte-integrations/connectors/source-shopify/source_shopify/schemas/orders_risks.json rename to airbyte-integrations/connectors/source-shopify/source_shopify/schemas/order_risks.json diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/orders.json b/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/orders.json index 70c3b987923c..6a95e3340266 100644 --- a/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/orders.json +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/orders.json @@ -202,7 +202,21 @@ "type": ["null", "string"] }, "discount_codes": { - "type": ["null", "array"] + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "code": { + "type": ["null", "string"] + }, + "amount": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + } + } + } }, "email": { "type": ["null", "string"] @@ -818,13 +832,19 @@ "type": ["null", "string"] }, "tracking_numbers": { - "type": ["null", "array"] + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } }, "tracking_url": { "type": ["null", "string"] }, "tracking_urls": { - "type": ["null", "array"] + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } }, "updated_at": { "type": ["null", "string"], @@ -957,7 +977,18 @@ "type": ["null", "integer"] }, "properties": { - "type": ["null", "array"] + "type": ["null", "array"], + "items": { + "type": ["null", 
"object"], + "properties": { + "name": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + } + } + } }, "quantity": { "type": ["null", "integer"] @@ -1336,7 +1367,18 @@ "type": ["null", "integer"] }, "properties": { - "type": ["null", "array"] + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "name": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + } + } + } }, "quantity": { "type": ["null", "integer"] diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/shop.json b/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/shop.json index 60613accc472..b4eb4fb815b2 100644 --- a/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/shop.json +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/schemas/shop.json @@ -1,5 +1,5 @@ { - "type": "object", + "type": ["null", "object"], "properties": { "address1": { "type": ["null", "string"] diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/source.py b/airbyte-integrations/connectors/source-shopify/source_shopify/source.py index 3591fe3bf898..96c222ad8a94 100644 --- a/airbyte-integrations/connectors/source-shopify/source_shopify/source.py +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/source.py @@ -63,9 +63,14 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp # transform method was implemented according to issue 4841 # Shopify API returns price fields as a string and it should be converted to number # this solution designed to convert string into number, but in future can be modified for general purpose - for record in records: - yield self._transformer.transform(record) - + if isinstance(records, dict): + # for cases when we have a single record as dict + yield self._transformer.transform(records) + else: + # for other cases + for record in records: + yield self._transformer.transform(record) + @property @abstractmethod def data_field(self) -> str: @@ -141,12 +146,17 @@ class ChildSubstream(IncrementalShopifyStream): :: @ parent_stream_class - defines the parent stream object to read from :: @ slice_key - defines the name of the property in stream slices dict. - :: @ record_field_name - the name of the field inside of parent stream record. Default is `id`. + :: @ nested_record - the name of the field inside of parent stream record. Default is `id`. + :: @ nested_record_field_name - the name of the field inside of nested_record. + :: @ nested_substream - the name of the nested entity inside of parent stream, helps to reduce the number of + API Calls, if present, see `OrderRefunds` stream for more. 
""" parent_stream_class: object = None slice_key: str = None - record_field_name: str = "id" + nested_record: str = "id" + nested_record_field_name: str = None + nested_substream = None def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: params = {"limit": self.limit} @@ -157,14 +167,21 @@ def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: """ Reading the parent stream for slices with structure: - EXAMPLE: for given record_field_name as `id` of Orders, + EXAMPLE: for given nested_record as `id` of Orders, Output: [ {slice_key: 123}, {slice_key: 456}, ..., {slice_key: 999} ] """ parent_stream = self.parent_stream_class(self.config) parent_stream_state = stream_state_cache.cached_state.get(parent_stream.name) for record in parent_stream.read_records(stream_state=parent_stream_state, **kwargs): - yield {self.slice_key: record[self.record_field_name]} + # to limit the number of API Calls and reduce the time of data fetch, + # we can pull the ready data for child_substream, if nested data is present, + # and corresponds to the data of child_substream we need. + if self.nested_substream: + if record.get(self.nested_substream): + yield {self.slice_key: record[self.nested_record]} + else: + yield {self.slice_key: record[self.nested_record]} def read_records( self, @@ -174,7 +191,13 @@ def read_records( ) -> Iterable[Mapping[str, Any]]: """Reading child streams records for each `id`""" - self.logger.info(f"Reading {self.name} for {self.slice_key}: {stream_slice.get(self.slice_key)}") + slice_data = stream_slice.get(self.slice_key) + # sometimes the stream_slice.get(self.slice_key) has the list of records, + # to avoid data exposition inside the logs, we should get the data we need correctly out of stream_slice. 
+        if isinstance(slice_data, list) and self.nested_record_field_name is not None and len(slice_data) > 0:
+            slice_data = slice_data[0].get(self.nested_record_field_name)
+
+        self.logger.info(f"Reading {self.name} for {self.slice_key}: {slice_data}")

        records = super().read_records(stream_slice=stream_slice, **kwargs)
        yield from self.filter_records_newer_than_state(stream_state=stream_state, records_slice=records)
@@ -256,20 +279,22 @@ def request_params(
        return params


-class OrdersRefunds(ChildSubstream):
+class OrderRefunds(ChildSubstream):

    parent_stream_class: object = Orders
    slice_key = "order_id"

    data_field = "refunds"
    cursor_field = "created_at"
+    # we only emit slices for orders that are already known to contain refunds data
+    nested_substream = "refunds"

    def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str:
        order_id = stream_slice["order_id"]
        return f"orders/{order_id}/{self.data_field}.json"


-class OrdersRisks(ChildSubstream):
+class OrderRisks(ChildSubstream):

    parent_stream_class: object = Orders
    slice_key = "order_id"
@@ -364,12 +389,13 @@ class InventoryItems(ChildSubstream):

    parent_stream_class: object = Products
    slice_key = "id"
-    record_field_name = "variants"
-
+    nested_record = "variants"
+    nested_record_field_name = "inventory_item_id"
    data_field = "inventory_items"

    def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str:
-        ids = ",".join(str(x["inventory_item_id"]) for x in stream_slice[self.slice_key])
+
+        ids = ",".join(str(x[self.nested_record_field_name]) for x in stream_slice[self.slice_key])
        return f"inventory_items.json?ids={ids}"
@@ -405,12 +431,6 @@ def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str:
class Shop(ShopifyStream):
    data_field = "shop"

-    @limiter.balance_rate_limit()
-    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
-        json_response = response.json()
-        record = json_response.get(self.data_field, []) if self.data_field is not None else json_response
-        return [record]
-
    def path(self, **kwargs) -> str:
        return f"{self.data_field}.json"
@@ -421,14 +441,14 @@ def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) ->
        """
        Testing connection availability for the connector.
""" - auth = ShopifyAuthenticator(config).get_auth_header() - api_version = "2021-07" # Latest Stable Release - url = f"https://{config['shop']}.myshopify.com/admin/api/{api_version}/shop.json" - + config["authenticator"] = ShopifyAuthenticator(config) try: - session = requests.get(url, headers=auth) - session.raise_for_status() - return True, None + responce = list(Shop(config).read_records(sync_mode=None)) + # check for the shop_id is present in the responce + shop_id = responce[0].get("id") + if shop_id is not None: + return True, None except requests.exceptions.RequestException as e: return False, e @@ -449,8 +468,8 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: Metafields(config), CustomCollections(config), Collects(config), - OrdersRefunds(config), - OrdersRisks(config), + OrderRefunds(config), + OrderRisks(config), Transactions(config), Pages(config), PriceRules(config), diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/spec.json b/airbyte-integrations/connectors/source-shopify/source_shopify/spec.json index fc4072470671..7f7f785fab0a 100644 --- a/airbyte-integrations/connectors/source-shopify/source_shopify/spec.json +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/spec.json @@ -24,7 +24,7 @@ { "type": "object", "title": "OAuth2.0", - "required": ["client_id", "client_secret", "access_token"], + "required": ["client_id", "client_secret", "access_token", "auth_method"], "properties": { "auth_method": { "type": "string", @@ -53,7 +53,7 @@ { "title": "API Password", "type": "object", - "required": ["api_password"], + "required": ["api_password", "auth_method"], "properties": { "auth_method": { "type": "string", diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/transform.py b/airbyte-integrations/connectors/source-shopify/source_shopify/transform.py index f73583333cac..8398d2096a8c 100644 --- a/airbyte-integrations/connectors/source-shopify/source_shopify/transform.py +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/transform.py @@ -64,6 +64,10 @@ def _first_non_null_type(schema_types: List[str]) -> str: def _transform_number(value: Any): return Decimal(value) + @staticmethod + def _transform_string(value: Any): + return str(value) + def _transform_array(self, array: List[Any], item_properties: Mapping[str, Any]): # iterate over items in array, compare schema types and convert if necessary. 
for index, record in enumerate(array): @@ -92,6 +96,8 @@ def transform(self, field: Any, schema: Mapping[str, Any] = None) -> Iterable[Mu if not any(field_json_type in schema_types for field_json_type in field_json_types): if schema_type == "number": return self._transform_number(field) + if schema_type == "string": + return self._transform_string(field) if schema_type == "object": properties = schema.get("properties", {}) return self._transform_object(field, properties) diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/Dockerfile b/airbyte-integrations/connectors/source-snapchat-marketing/Dockerfile index 3cf7ffea51d6..b3855ff5f3f3 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/Dockerfile +++ b/airbyte-integrations/connectors/source-snapchat-marketing/Dockerfile @@ -25,5 +25,5 @@ COPY source_snapchat_marketing ./source_snapchat_marketing ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.3 +LABEL io.airbyte.version=0.1.4 LABEL io.airbyte.name=airbyte/source-snapchat-marketing diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/spec.json b/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/spec.json index d954f464e013..064694c71357 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/spec.json +++ b/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/spec.json @@ -10,25 +10,25 @@ "client_id": { "title": "Client ID", "type": "string", - "description": "The Snapchat Client ID for API credentials.", + "description": "The Client ID of your Snapchat developer application.", "airbyte_secret": true }, "client_secret": { "title": "Client Secret", "type": "string", - "description": "The Client Secret for a given Client ID.", + "description": "The Client Secret of your Snapchat developer application.", "airbyte_secret": true }, "refresh_token": { - "title": "API Refresh Token", + "title": "Refresh Token", "type": "string", - "description": "Refresh Token to get next api key after expiration. Is given with API Key", + "description": "Refresh Token to renew the expired Access Token.", "airbyte_secret": true }, "start_date": { "title": "Start Date", "type": "string", - "description": "The start date to sync data. Leave blank for full sync. Format: YYYY-MM-DD.", + "description": "UTC date and time in the format 2017-01-25T00:00:00Z. 
Any data before this date will not be replicated.", "examples": ["2021-01-01"], "default": "1970-01-01", "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" diff --git a/airbyte-integrations/connectors/source-snowflake/Dockerfile b/airbyte-integrations/connectors/source-snowflake/Dockerfile index 47435ff39edd..10a04c39373a 100644 --- a/airbyte-integrations/connectors/source-snowflake/Dockerfile +++ b/airbyte-integrations/connectors/source-snowflake/Dockerfile @@ -4,9 +4,7 @@ WORKDIR /airbyte ENV APPLICATION source-snowflake -COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar - -RUN tar xf ${APPLICATION}.tar --strip-components=1 +ADD build/distributions/${APPLICATION}*.tar /airbyte LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/source-snowflake diff --git a/airbyte-integrations/connectors/source-strava/Dockerfile b/airbyte-integrations/connectors/source-strava/Dockerfile index 7f89793bc788..5b729efd234b 100644 --- a/airbyte-integrations/connectors/source-strava/Dockerfile +++ b/airbyte-integrations/connectors/source-strava/Dockerfile @@ -34,5 +34,5 @@ COPY source_strava ./source_strava ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.version=0.1.2 LABEL io.airbyte.name=airbyte/source-strava diff --git a/airbyte-integrations/connectors/source-strava/source_strava/spec.json b/airbyte-integrations/connectors/source-strava/source_strava/spec.json index 8ffba4373baa..5515b5896b90 100644 --- a/airbyte-integrations/connectors/source-strava/source_strava/spec.json +++ b/airbyte-integrations/connectors/source-strava/source_strava/spec.json @@ -13,6 +13,12 @@ ], "additionalProperties": false, "properties": { + "auth_type": { + "type": "string", + "const": "Client", + "enum": ["Client"], + "default": "Client" + }, "client_id": { "type": "string", "description": "The Client ID of your Strava developer application.", @@ -50,5 +56,48 @@ "examples": ["2016-12-31 23:59:59"] } } + }, + "advanced_auth": { + "auth_flow_type": "oauth2.0", + "predicate_key": ["auth_type"], + "predicate_value": "Client", + "oauth_config_specification": { + "complete_oauth_output_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "refresh_token": { + "type": "string", + "path_in_connector_config": ["refresh_token"] + } + } + }, + "complete_oauth_server_input_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "client_id": { + "type": "string" + }, + "client_secret": { + "type": "string" + } + } + }, + "complete_oauth_server_output_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "client_id": { + "type": "string", + "path_in_connector_config": ["client_id"] + }, + "client_secret": { + "type": "string", + "path_in_connector_config": ["client_secret"] + } + } + } + } } } diff --git a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile index c0a06babc403..3613d40d0737 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile +++ b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile @@ -25,5 +25,5 @@ COPY source_zendesk_support ./source_zendesk_support ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.10 +LABEL io.airbyte.version=0.1.9 LABEL 
io.airbyte.name=airbyte/source-zendesk-support diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py index 7330a61e8db2..ba73c653c723 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py @@ -323,24 +323,6 @@ def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> return params -class IncrementalUnsortedCursorStream(IncrementalUnsortedStream, ABC): - """Stream for loading without sorting but with cursor based pagination""" - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - has_more = response.json().get("meta", {}).get("has_more") - if not has_more: - self._finished = True - return None - return response.json().get("meta", {}).get("after_cursor") - - def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(next_page_token=next_page_token, **kwargs) - params["page[size]"] = self.page_size - if next_page_token: - params["page[after]"] = next_page_token - return params - - class FullRefreshStream(IncrementalUnsortedPageStream, ABC): """ "Stream for endpoints where there are not any created_at or updated_at fields""" @@ -348,15 +330,22 @@ class FullRefreshStream(IncrementalUnsortedPageStream, ABC): cursor_field = SourceZendeskSupportStream.cursor_field -class IncrementalSortedCursorStream(IncrementalUnsortedCursorStream, ABC): +class IncrementalSortedCursorStream(IncrementalUnsortedStream, ABC): """Stream for loading sorting data with cursor based pagination""" - def request_params(self, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(**kwargs) - if params: - params.update({"sort_by": self.cursor_field, "sort_order": "desc"}) + def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: + params = super().request_params(next_page_token=next_page_token, **kwargs) + params.update({"sort_by": self.cursor_field, "sort_order": "desc", "limit": self.page_size}) + + if next_page_token: + params["cursor"] = next_page_token return params + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + if self.is_finished: + return None + return response.json().get("before_cursor") + class IncrementalSortedPageStream(IncrementalUnsortedPageStream, ABC): """Stream for loading sorting data with normal pagination""" @@ -368,7 +357,7 @@ def request_params(self, **kwargs) -> MutableMapping[str, Any]: return params -class TicketComments(IncrementalSortedCursorStream): +class TicketComments(IncrementalSortedPageStream): """TicketComments stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_comments/ ZenDesk doesn't provide API for loading of all comments by one direct endpoints. 
Thus at first we loads all updated tickets and after this tries to load all created/updated @@ -379,7 +368,7 @@ class TicketComments(IncrementalSortedCursorStream): raise_on_http_errors = False response_list_name = "comments" - cursor_field = IncrementalSortedCursorStream.created_at_field + cursor_field = IncrementalSortedPageStream.created_at_field def __init__(self, **kwargs): super().__init__(**kwargs) @@ -475,8 +464,7 @@ def parse_response( # 2) pagination and sorting mechanism # 3) cursor pagination and sorting mechanism # 4) without sorting but with pagination -# 5) without sorting but with cursor pagination -# 6) without created_at/updated_at fields +# 5) without created_at/updated_at fields # endpoints provide a built-in incremental approach @@ -518,15 +506,15 @@ def get_last_end_time(self) -> Optional[Union[str, int]]: # endpoints provide a pagination mechanism but we can't manage a response order -class Groups(IncrementalUnsortedCursorStream): +class Groups(IncrementalUnsortedPageStream): """Groups stream: https://developer.zendesk.com/api-reference/ticketing/groups/groups/""" -class GroupMemberships(IncrementalUnsortedCursorStream): +class GroupMemberships(IncrementalUnsortedPageStream): """GroupMemberships stream: https://developer.zendesk.com/api-reference/ticketing/groups/group_memberships/""" -class SatisfactionRatings(IncrementalUnsortedCursorStream): +class SatisfactionRatings(IncrementalUnsortedPageStream): """SatisfactionRatings stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/satisfaction_ratings/ The ZenDesk API for this stream provides the filter "start_time" that can be used for incremental logic @@ -558,17 +546,9 @@ class TicketForms(IncrementalUnsortedPageStream): """TicketForms stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_forms/""" -class TicketMetrics(IncrementalUnsortedCursorStream): +class TicketMetrics(IncrementalUnsortedPageStream): """TicketMetric stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_metrics/""" - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - # Tickets are ordered chronologically by created date, from newest to oldest. - # No need to get next page once cursor passed initial state - if self.is_finished: - return None - - return super().next_page_token(response) - class TicketMetricEvents(IncrementalExportStream): """TicketMetricEvents stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_metric_events/""" @@ -576,14 +556,14 @@ class TicketMetricEvents(IncrementalExportStream): cursor_field = "time" -class Macros(IncrementalSortedCursorStream): +class Macros(IncrementalSortedPageStream): """Macros stream: https://developer.zendesk.com/api-reference/ticketing/business-rules/macros/""" # endpoints provide a cursor pagination and sorting mechanism -class TicketAudits(IncrementalUnsortedStream): +class TicketAudits(IncrementalSortedCursorStream): """TicketAudits stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_audits/""" # can request a maximum of 1,000 results @@ -594,20 +574,6 @@ class TicketAudits(IncrementalUnsortedStream): # Root of response is 'audits'. As rule as an endpoint name is equal a response list name response_list_name = "audits" - # This endpoint uses a variant of cursor pagination with some differences from cursor pagination used in other endpoints. 
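For reference, the sorted cursor pagination that `IncrementalSortedCursorStream` now owns (and that `TicketAudits` inherits below) follows this pattern. The sketch is a standalone simplification: plain functions instead of the stream class, assuming a `requests.Response`-like object and an `is_finished` flag maintained by the stream.

```python
# Sketch of Zendesk sorted cursor pagination: request pages newest-first and
# follow the "before_cursor" token until the stream state has been reached.
from typing import Any, MutableMapping, Optional

import requests


def request_params(cursor_field: str, page_size: int, next_page_token: Optional[str] = None) -> MutableMapping[str, Any]:
    params: MutableMapping[str, Any] = {
        "sort_by": cursor_field,
        "sort_order": "desc",
        "limit": page_size,
    }
    if next_page_token:
        params["cursor"] = next_page_token
    return params


def next_page_token(response: requests.Response, is_finished: bool) -> Optional[str]:
    # records arrive newest-first, so once the cursor passes the saved stream
    # state there is no point requesting older pages
    if is_finished:
        return None
    return response.json().get("before_cursor")
```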
- def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(next_page_token=next_page_token, **kwargs) - params.update({"sort_by": self.cursor_field, "sort_order": "desc", "limit": self.page_size}) - - if next_page_token: - params["cursor"] = next_page_token - return params - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - if self.is_finished: - return None - return response.json().get("before_cursor") - # endpoints don't provide the updated_at/created_at fields # thus we can't implement an incremental logic for them diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java index 68dcade070c9..534fe26572f9 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java @@ -22,6 +22,7 @@ import io.airbyte.oauth.flows.SalesforceOAuthFlow; import io.airbyte.oauth.flows.SlackOAuthFlow; import io.airbyte.oauth.flows.SnapchatMarketingOAuthFlow; +import io.airbyte.oauth.flows.StravaOAuthFlow; import io.airbyte.oauth.flows.SurveymonkeyOAuthFlow; import io.airbyte.oauth.flows.TrelloOAuthFlow; import io.airbyte.oauth.flows.facebook.FacebookMarketingOAuthFlow; @@ -61,6 +62,7 @@ public OAuthImplementationFactory(final ConfigRepository configRepository, final .put("airbyte/source-salesforce", new SalesforceOAuthFlow(configRepository, httpClient)) .put("airbyte/source-slack", new SlackOAuthFlow(configRepository, httpClient)) .put("airbyte/source-snapchat-marketing", new SnapchatMarketingOAuthFlow(configRepository, httpClient)) + .put("airbyte/source-strava", new StravaOAuthFlow(configRepository, httpClient)) .put("airbyte/source-surveymonkey", new SurveymonkeyOAuthFlow(configRepository, httpClient)) .put("airbyte/source-trello", new TrelloOAuthFlow(configRepository, httpClient)) .put("airbyte/source-youtube-analytics", new YouTubeAnalyticsOAuthFlow(configRepository, httpClient)) diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/StravaOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/StravaOAuthFlow.java new file mode 100644 index 000000000000..99a3dac3f402 --- /dev/null +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/StravaOAuthFlow.java @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */
+
+package io.airbyte.oauth.flows;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.google.common.collect.ImmutableMap;
+import io.airbyte.config.persistence.ConfigRepository;
+import io.airbyte.oauth.BaseOAuth2Flow;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.net.http.HttpClient;
+import java.util.Map;
+import java.util.UUID;
+import java.util.function.Supplier;
+import org.apache.http.client.utils.URIBuilder;
+
+public class StravaOAuthFlow extends BaseOAuth2Flow {
+
+  private static final String AUTHORIZE_URL = "https://www.strava.com/oauth/authorize";
+  private static final String ACCESS_TOKEN_URL = "https://www.strava.com/oauth/token";
+
+  public StravaOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) {
+    super(configRepository, httpClient);
+  }
+
+  public StravaOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier<String> stateSupplier) {
+    super(configRepository, httpClient, stateSupplier, TOKEN_REQUEST_CONTENT_TYPE.JSON);
+  }
+
+  /**
+   * Depending on the OAuth flow implementation, the URL to grant user's consent may differ,
+   * especially in the query parameters to be provided. This function should generate such consent URL
+   * accordingly.
+   *
+   * @param definitionId The configured definition ID of this client
+   * @param clientId The configured client ID
+   * @param redirectUrl the redirect URL
+   */
+  @Override
+  protected String formatConsentUrl(final UUID definitionId,
+                                    final String clientId,
+                                    final String redirectUrl,
+                                    final JsonNode inputOAuthConfiguration)
+      throws IOException {
+
+    try {
+      return new URIBuilder(AUTHORIZE_URL)
+          .addParameter("client_id", clientId)
+          .addParameter("redirect_uri", redirectUrl)
+          .addParameter("state", getState())
+          .addParameter("scope", getScopes())
+          .addParameter("response_type", "code")
+          .build().toString();
+    } catch (final URISyntaxException e) {
+      throw new IOException("Failed to format Consent URL for OAuth flow", e);
+    }
+  }
+
+  @Override
+  protected Map<String, String> getAccessTokenQueryParameters(final String clientId,
+                                                              final String clientSecret,
+                                                              final String authCode,
+                                                              final String redirectUrl) {
+    return ImmutableMap.<String, String>builder()
+        // required
+        .put("client_id", clientId)
+        .put("redirect_uri", redirectUrl)
+        .put("client_secret", clientSecret)
+        .put("code", authCode)
+        .put("grant_type", "authorization_code")
+        .build();
+  }
+
+  private String getScopes() {
+    return "activity:read_all";
+  }
+
+  /**
+   * Returns the URL from which to retrieve the access token.
+   *
+   */
+  @Override
+  protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) {
+    return ACCESS_TOKEN_URL;
+  }
+
+}
diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/StravaOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/StravaOAuthFlowTest.java
new file mode 100644
index 000000000000..2ad3014671b5
--- /dev/null
+++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/StravaOAuthFlowTest.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */ + +package io.airbyte.oauth.flows; + +import io.airbyte.oauth.BaseOAuthFlow; + +public class StravaOAuthFlowTest extends BaseOAuthFlowTest { + + @Override + protected BaseOAuthFlow getOAuthFlow() { + return new StravaOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); + } + + @Override + protected String getExpectedConsentUrl() { + return "https://www.strava.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scope=activity%3Aread_all&response_type=code"; + } + +} diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index 43fa671bc27a..6f04689d045c 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -5,7 +5,7 @@ ENV APPLICATION airbyte-scheduler WORKDIR /app -ADD bin/${APPLICATION}-0.33.12-alpha.tar /app +ADD bin/${APPLICATION}-0.34.1-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.33.12-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.34.1-alpha/bin/${APPLICATION}"] diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index f2cc5fe707a8..08cdf577471e 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -7,7 +7,7 @@ ENV APPLICATION airbyte-server WORKDIR /app -ADD bin/${APPLICATION}-0.33.12-alpha.tar /app +ADD bin/${APPLICATION}-0.34.1-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.33.12-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.34.1-alpha/bin/${APPLICATION}"] diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationHandler.java index 5107d5d9db04..b8fc5ac489d7 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationHandler.java @@ -101,7 +101,7 @@ public void deleteDestination(final DestinationIdRequestBody destinationIdReques public void deleteDestination(final DestinationRead destination) throws JsonValidationException, IOException, ConfigNotFoundException { // disable all connections associated with this destination - // Delete connections first in case it it fails in the middle, destination will still be visible + // Delete connections first in case it fails in the middle, destination will still be visible final WorkspaceIdRequestBody workspaceIdRequestBody = new WorkspaceIdRequestBody().workspaceId(destination.getWorkspaceId()); for (final ConnectionRead connectionRead : connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody).getConnections()) { if (!connectionRead.getDestinationId().equals(destination.getDestinationId())) { diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java index e5e36483fea0..8cd3be154581 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java @@ -157,7 +157,7 @@ public void deleteSource(final SourceIdRequestBody sourceIdRequestBody) public void deleteSource(final SourceRead source) throws JsonValidationException, IOException, ConfigNotFoundException { // "delete" all connections associated with source as 
well. - // Delete connections first in case it it fails in the middle, source will still be visible + // Delete connections first in case it fails in the middle, source will still be visible final WorkspaceIdRequestBody workspaceIdRequestBody = new WorkspaceIdRequestBody() .workspaceId(source.getWorkspaceId()); for (final ConnectionRead connectionRead : connectionsHandler @@ -170,10 +170,7 @@ public void deleteSource(final SourceRead source) } final ConnectorSpecification spec = getSpecFromSourceId(source.getSourceId()); - validateSource(spec, source.getConnectionConfiguration()); - final var fullConfig = configRepository.getSourceConnectionWithSecrets(source.getSourceId()).getConfiguration(); - validateSource(spec, fullConfig); // persist persistSourceConnection( diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java index 0a574582293a..855533918f0b 100644 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java +++ b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/AcceptanceTests.java @@ -28,6 +28,7 @@ import io.airbyte.api.client.model.AirbyteStreamAndConfiguration; import io.airbyte.api.client.model.AirbyteStreamConfiguration; import io.airbyte.api.client.model.AttemptInfoRead; +import io.airbyte.api.client.model.AttemptStatus; import io.airbyte.api.client.model.CheckConnectionRead; import io.airbyte.api.client.model.ConnectionCreate; import io.airbyte.api.client.model.ConnectionIdRequestBody; @@ -86,6 +87,7 @@ import java.nio.file.Path; import java.sql.SQLException; import java.time.Duration; +import java.time.Instant; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -102,6 +104,7 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; @@ -130,8 +133,8 @@ public class AcceptanceTests { // assume env file is one directory level up from airbyte-tests. private final static File ENV_FILE = Path.of(System.getProperty("user.dir")).getParent().resolve(".env").toFile(); - private static final String SOURCE_E2E_TEST_CONNECTOR_VERSION = "0.1.0"; - private static final String DESTINATION_E2E_TEST_CONNECTOR_VERSION = "0.1.0"; + private static final String SOURCE_E2E_TEST_CONNECTOR_VERSION = "0.1.1"; + private static final String DESTINATION_E2E_TEST_CONNECTOR_VERSION = "0.1.1"; private static final Charset UTF8 = StandardCharsets.UTF_8; private static final boolean IS_KUBE = System.getenv().containsKey("KUBE"); @@ -860,6 +863,77 @@ public void testBackpressure() throws Exception { } } + // This test is disabled because it takes a couple minutes to run, as it is testing timeouts. + // It should be re-enabled when the @SlowIntegrationTest can be applied to it. 
+ // See relevant issue: https://github.com/airbytehq/airbyte/issues/8397 + @Test + @Order(17) + @Disabled + public void testFailureTimeout() throws Exception { + final SourceDefinitionRead sourceDefinition = apiClient.getSourceDefinitionApi().createSourceDefinition(new SourceDefinitionCreate() + .name("E2E Test Source") + .dockerRepository("airbyte/source-e2e-test") + .dockerImageTag(SOURCE_E2E_TEST_CONNECTOR_VERSION) + .documentationUrl(URI.create("https://example.com"))); + + final DestinationDefinitionRead destinationDefinition = apiClient.getDestinationDefinitionApi() + .createDestinationDefinition(new DestinationDefinitionCreate() + .name("E2E Test Destination") + .dockerRepository("airbyte/destination-e2e-test") + .dockerImageTag(DESTINATION_E2E_TEST_CONNECTOR_VERSION) + .documentationUrl(URI.create("https://example.com"))); + + final SourceRead source = createSource( + "E2E Test Source -" + UUID.randomUUID(), + workspaceId, + sourceDefinition.getSourceDefinitionId(), + Jsons.jsonNode(ImmutableMap.builder() + .put("type", "INFINITE_FEED") + .put("max_records", 1000) + .put("message_interval", 100) + .build())); + + // Destination fails after processing 5 messages, so the job should fail after the graceful close + // timeout of 1 minute + final DestinationRead destination = createDestination( + "E2E Test Destination -" + UUID.randomUUID(), + workspaceId, + destinationDefinition.getDestinationDefinitionId(), + Jsons.jsonNode(ImmutableMap.builder() + .put("type", "FAILING") + .put("num_messages", 5) + .build())); + + final String connectionName = "test-connection"; + final UUID sourceId = source.getSourceId(); + final UUID destinationId = destination.getDestinationId(); + final AirbyteCatalog catalog = discoverSourceSchema(sourceId); + + final UUID connectionId = + createConnection(connectionName, sourceId, destinationId, Collections.emptyList(), catalog, null) + .getConnectionId(); + + final JobInfoRead connectionSyncRead1 = apiClient.getConnectionApi() + .syncConnection(new ConnectionIdRequestBody().connectionId(connectionId)); + + // wait to get out of pending. 
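The bounded wait used below replaces a fixed 400-iteration loop. The same pattern, sketched in Python rather than the test's Java, with a hypothetical `get_status` callable standing in for the jobs API poll:

```python
# Sketch of bounded polling: wait until the job leaves the given statuses,
# or give up once the maximum wait time has elapsed.
import time
from datetime import datetime, timedelta
from typing import Callable, Set


def wait_for_job(
    get_status: Callable[[], str],
    wait_statuses: Set[str],
    max_wait: timedelta = timedelta(minutes=6),
) -> str:
    start = datetime.now()
    status = get_status()
    while status in wait_statuses:
        if datetime.now() - start > max_wait:
            break  # stop waiting instead of spinning forever
        time.sleep(1)
        status = get_status()
    return status
```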
+    final JobRead runningJob = waitForJob(apiClient.getJobsApi(), connectionSyncRead1.getJob(), Sets.newHashSet(JobStatus.PENDING));
+
+    // wait for job for max of 3 minutes, by which time the job attempt should have failed
+    waitForJob(apiClient.getJobsApi(), runningJob, Sets.newHashSet(JobStatus.RUNNING), Duration.ofMinutes(3));
+
+    final JobIdRequestBody jobId = new JobIdRequestBody().id(runningJob.getId());
+    final JobInfoRead jobInfo = apiClient.getJobsApi().getJobInfo(jobId);
+    final AttemptInfoRead attemptInfoRead = jobInfo.getAttempts().get(jobInfo.getAttempts().size() - 1);
+
+    // assert that the job attempt failed, and cancel the job regardless of status to prevent retries
+    try {
+      assertEquals(AttemptStatus.FAILED, attemptInfoRead.getAttempt().getStatus());
+    } finally {
+      apiClient.getJobsApi().cancelJob(jobId);
+    }
+  }
+
   private AirbyteCatalog discoverSourceSchema(final UUID sourceId) throws ApiException {
     return apiClient.getSourceApi().discoverSchemaForSource(new SourceIdRequestBody().sourceId(sourceId)).getCatalog();
   }
@@ -1199,14 +1273,23 @@ private static void waitForSuccessfulJob(final JobsApi jobsApi, final JobRead or
     assertEquals(JobStatus.SUCCEEDED, job.getStatus());
   }

-  @SuppressWarnings("BusyWait")
   private static JobRead waitForJob(final JobsApi jobsApi, final JobRead originalJob, final Set<JobStatus> jobStatuses)
       throws InterruptedException, ApiException {
+    return waitForJob(jobsApi, originalJob, jobStatuses, Duration.ofMinutes(6));
+  }
+
+  @SuppressWarnings("BusyWait")
+  private static JobRead waitForJob(final JobsApi jobsApi, final JobRead originalJob, final Set<JobStatus> jobStatuses, final Duration maxWaitTime)
+      throws InterruptedException, ApiException {
     JobRead job = originalJob;
-    int count = 0;
-    while (count < 400 && jobStatuses.contains(job.getStatus())) {
+
+    final Instant waitStart = Instant.now();
+    while (jobStatuses.contains(job.getStatus())) {
+      if (Duration.between(waitStart, Instant.now()).compareTo(maxWaitTime) > 0) {
+        LOGGER.info("Max wait time of {} has been reached.
Stopping wait.", maxWaitTime); + break; + } sleep(1000); - count++; job = jobsApi.getJobInfo(new JobIdRequestBody().id(job.getId())).getJob(); LOGGER.info("waiting: job id: {} config type: {} status: {}", job.getId(), job.getConfigType(), job.getStatus()); diff --git a/airbyte-webapp/.storybook/withProvider.tsx b/airbyte-webapp/.storybook/withProvider.tsx index 69f57a67d608..06cf64d09cee 100644 --- a/airbyte-webapp/.storybook/withProvider.tsx +++ b/airbyte-webapp/.storybook/withProvider.tsx @@ -1,7 +1,7 @@ -import { Router } from "react-router-dom"; +import { MemoryRouter } from "react-router-dom"; import * as React from "react"; import { IntlProvider } from "react-intl"; -import { createMemoryHistory } from "history"; +// import { createMemoryHistory } from "history"; import { ThemeProvider } from "styled-components"; // TODO: theme was not working correctly so imported directly @@ -27,21 +27,21 @@ class WithProviders extends React.Component { return ( - - - - - + + + + + {children} - - - - - + + + + + ); } diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index 94a1249de2bb..f862b4563b64 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.33.12-alpha", + "version": "0.34.1-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.33.12-alpha", + "version": "0.34.1-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^1.2.36", "@fortawesome/free-brands-svg-icons": "^5.15.4", @@ -32,7 +32,7 @@ "react-markdown": "^7.0.1", "react-pose": "^4.0.10", "react-query": "^3.19.1", - "react-router-dom": "^5.1.2", + "react-router-dom": "6.1.1", "react-select": "^4.3.1", "react-table": "^7.5.0", "react-use": "^15.3.8", @@ -52,10 +52,10 @@ "@storybook/preset-create-react-app": "^3.2.0", "@storybook/react": "^6.3.2", "@storybook/theming": "^6.3.8", - "@testing-library/jest-dom": "^5.11.4", - "@testing-library/react": "^11.1.0", - "@testing-library/react-hooks": "^7.0.1", - "@testing-library/user-event": "^12.1.10", + "@testing-library/jest-dom": "5.16.1", + "@testing-library/react": "12.1.2", + "@testing-library/react-hooks": "^7.0.2", + "@testing-library/user-event": "^13.5.0", "@types/flat": "^5.0.1", "@types/jest": "^24.0.0", "@types/json-schema": "^7.0.6", @@ -66,7 +66,6 @@ "@types/react-dom": "17.0.1", "@types/react-helmet": "6.1.0", "@types/react-lazylog": "^4.5.0", - "@types/react-router-dom": "^5.1.3", "@types/react-select": "^4.0.16", "@types/react-table": "^7.0.12", "@types/react-widgets": "4.4.4", @@ -81,6 +80,7 @@ "lint-staged": "^10.0.8", "prettier": "^2.2.1", "react-scripts": "4.0.2", + "react-select-event": "^5.3.0", "storybook-addon-styled-component-theme": "^2.0.0", "tar": "^6.1.11", "tmpl": "^1.0.5", @@ -15885,22 +15885,22 @@ } }, "node_modules/@testing-library/dom": { - "version": "7.30.3", - "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-7.30.3.tgz", - "integrity": "sha512-7JhIg2MW6WPwyikH2iL3o7z+FTVgSOd2jqCwTAHqK7Qal2gRRYiUQyURAxtbK9VXm/UTyG9bRihv8C5Tznr2zw==", + "version": "8.11.1", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-8.11.1.tgz", + "integrity": "sha512-3KQDyx9r0RKYailW2MiYrSSKEfH0GTkI51UGEvJenvcoDoeRYs0PZpi2SXqtnMClQvCqdtTTpOfFETDTVADpAg==", "dev": true, "dependencies": { "@babel/code-frame": "^7.10.4", "@babel/runtime": "^7.12.5", "@types/aria-query": "^4.2.0", - "aria-query": "^4.2.2", + "aria-query": "^5.0.0", "chalk": "^4.1.0", - 
"dom-accessibility-api": "^0.5.4", + "dom-accessibility-api": "^0.5.9", "lz-string": "^1.4.4", - "pretty-format": "^26.6.2" + "pretty-format": "^27.0.2" }, "engines": { - "node": ">=10" + "node": ">=12" } }, "node_modules/@testing-library/dom/node_modules/@babel/runtime": { @@ -15913,43 +15913,43 @@ } }, "node_modules/@testing-library/dom/node_modules/@jest/types": { - "version": "26.6.2", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-26.6.2.tgz", - "integrity": "sha512-fC6QCp7Sc5sX6g8Tvbmj4XUTbyrik0akgRy03yjXbQaBWWNWGE7SGtJk98m0N8nzegD/7SggrUlivxo5ax4KWQ==", + "version": "27.4.2", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-27.4.2.tgz", + "integrity": "sha512-j35yw0PMTPpZsUoOBiuHzr1zTYoad1cVIE0ajEjcrJONxxrko/IRGKkXx3os0Nsi4Hu3+5VmDbVfq5WhG/pWAg==", "dev": true, "dependencies": { "@types/istanbul-lib-coverage": "^2.0.0", "@types/istanbul-reports": "^3.0.0", "@types/node": "*", - "@types/yargs": "^15.0.0", + "@types/yargs": "^16.0.0", "chalk": "^4.0.0" }, "engines": { - "node": ">= 10.14.2" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, "node_modules/@testing-library/dom/node_modules/@types/istanbul-reports": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.0.tgz", - "integrity": "sha512-nwKNbvnwJ2/mndE9ItP/zc2TCzw6uuodnF4EHYWD+gCQDVBuRQL5UzbZD0/ezy1iKsFU2ZQiDqg4M9dN4+wZgA==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz", + "integrity": "sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==", "dev": true, "dependencies": { "@types/istanbul-lib-report": "*" } }, "node_modules/@testing-library/dom/node_modules/@types/yargs": { - "version": "15.0.13", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.13.tgz", - "integrity": "sha512-kQ5JNTrbDv3Rp5X2n/iUu37IJBDU2gsZ5R/g1/KHOOEc5IKfUFjXT6DENPGduh08I/pamwtEq4oul7gUqKTQDQ==", + "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.4.tgz", + "integrity": "sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw==", "dev": true, "dependencies": { "@types/yargs-parser": "*" } }, "node_modules/@testing-library/dom/node_modules/ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "dev": true, "engines": { "node": ">=8" @@ -15970,10 +15970,19 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/@testing-library/dom/node_modules/aria-query": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.0.0.tgz", + "integrity": "sha512-V+SM7AbUwJ+EBnB8+DXs0hPZHO0W6pqBcc0dW90OwtVG02PswOu/teuARoLQjdDOH+t9pJgGnW5/Qmouf3gPJg==", + "dev": true, + "engines": { + "node": ">=6.0" + } + }, "node_modules/@testing-library/dom/node_modules/chalk": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", + "version": "4.1.2", + "resolved": 
"https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", @@ -16014,18 +16023,30 @@ } }, "node_modules/@testing-library/dom/node_modules/pretty-format": { - "version": "26.6.2", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-26.6.2.tgz", - "integrity": "sha512-7AeGuCYNGmycyQbCqd/3PWH4eOoX/OiCa0uphp57NVTeAGdJGaAliecxwBDHYQCIvrW7aDBZCYeNTP/WX69mkg==", + "version": "27.4.2", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.4.2.tgz", + "integrity": "sha512-p0wNtJ9oLuvgOQDEIZ9zQjZffK7KtyR6Si0jnXULIDwrlNF8Cuir3AZP0hHv0jmKuNN/edOnbMjnzd4uTcmWiw==", "dev": true, "dependencies": { - "@jest/types": "^26.6.2", - "ansi-regex": "^5.0.0", - "ansi-styles": "^4.0.0", + "@jest/types": "^27.4.2", + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", "react-is": "^17.0.1" }, "engines": { - "node": ">= 10" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@testing-library/dom/node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, "node_modules/@testing-library/dom/node_modules/react-is": { @@ -16047,17 +16068,18 @@ } }, "node_modules/@testing-library/jest-dom": { - "version": "5.11.10", - "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-5.11.10.tgz", - "integrity": "sha512-FuKiq5xuk44Fqm0000Z9w0hjOdwZRNzgx7xGGxQYepWFZy+OYUMOT/wPI4nLYXCaVltNVpU1W/qmD88wLWDsqQ==", + "version": "5.16.1", + "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-5.16.1.tgz", + "integrity": "sha512-ajUJdfDIuTCadB79ukO+0l8O+QwN0LiSxDaYUTI4LndbbUsGi6rWU1SCexXzBA2NSjlVB9/vbkasQIL3tmPBjw==", "dev": true, "dependencies": { "@babel/runtime": "^7.9.2", "@types/testing-library__jest-dom": "^5.9.1", - "aria-query": "^4.2.2", + "aria-query": "^5.0.0", "chalk": "^3.0.0", "css": "^3.0.0", "css.escape": "^1.5.1", + "dom-accessibility-api": "^0.5.6", "lodash": "^4.17.15", "redent": "^3.0.0" }, @@ -16082,6 +16104,15 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/@testing-library/jest-dom/node_modules/aria-query": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.0.0.tgz", + "integrity": "sha512-V+SM7AbUwJ+EBnB8+DXs0hPZHO0W6pqBcc0dW90OwtVG02PswOu/teuARoLQjdDOH+t9pJgGnW5/Qmouf3gPJg==", + "dev": true, + "engines": { + "node": ">=6.0" + } + }, "node_modules/@testing-library/jest-dom/node_modules/chalk": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", @@ -16165,16 +16196,16 @@ } }, "node_modules/@testing-library/react": { - "version": "11.2.6", - "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-11.2.6.tgz", - "integrity": "sha512-TXMCg0jT8xmuU8BkKMtp8l7Z50Ykew5WNX8UoIKTaLFwKkP2+1YDhOLA2Ga3wY4x29jyntk7EWfum0kjlYiSjQ==", + "version": "12.1.2", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-12.1.2.tgz", + "integrity": "sha512-ihQiEOklNyHIpo2Y8FREkyD1QAea054U0MVbwH1m8N9TxeFz+KoJ9LkqoKqJlzx2JDm56DVwaJ1r36JYxZM05g==", "dev": true, "dependencies": { 
"@babel/runtime": "^7.12.5", - "@testing-library/dom": "^7.28.1" + "@testing-library/dom": "^8.0.0" }, "engines": { - "node": ">=10" + "node": ">=12" }, "peerDependencies": { "react": "*", @@ -16182,9 +16213,9 @@ } }, "node_modules/@testing-library/react-hooks": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/@testing-library/react-hooks/-/react-hooks-7.0.1.tgz", - "integrity": "sha512-bpEQ2SHSBSzBmfJ437NmnP+oArQ7aVmmULiAp6Ag2rtyLBLPNFSMmgltUbFGmQOJdPWo4Ub31kpUC5T46zXNwQ==", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/@testing-library/react-hooks/-/react-hooks-7.0.2.tgz", + "integrity": "sha512-dYxpz8u9m4q1TuzfcUApqi8iFfR6R0FaMbr2hjZJy1uC8z+bO/K4v8Gs9eogGKYQop7QsrBTFkv/BCF7MzD2Cg==", "dev": true, "dependencies": { "@babel/runtime": "^7.12.5", @@ -16232,9 +16263,9 @@ } }, "node_modules/@testing-library/user-event": { - "version": "12.8.3", - "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-12.8.3.tgz", - "integrity": "sha512-IR0iWbFkgd56Bu5ZI/ej8yQwrkCv8Qydx6RzwbKz9faXazR/+5tvYKsZQgyXJiwgpcva127YO6JcWy7YlCfofQ==", + "version": "13.5.0", + "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-13.5.0.tgz", + "integrity": "sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg==", "dev": true, "dependencies": { "@babel/runtime": "^7.12.5" @@ -16431,12 +16462,6 @@ "@types/unist": "*" } }, - "node_modules/@types/history": { - "version": "4.7.7", - "resolved": "https://registry.npmjs.org/@types/history/-/history-4.7.7.tgz", - "integrity": "sha512-2xtoL22/3Mv6a70i4+4RB7VgbDDORoWwjcqeNysojZA0R7NK17RbY5Gof/2QiFfJgX+KkWghbwJ+d/2SB8Ndzg==", - "dev": true - }, "node_modules/@types/hoist-non-react-statics": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.1.tgz", @@ -16719,27 +16744,6 @@ "@types/react": "*" } }, - "node_modules/@types/react-router": { - "version": "5.1.8", - "resolved": "https://registry.npmjs.org/@types/react-router/-/react-router-5.1.8.tgz", - "integrity": "sha512-HzOyJb+wFmyEhyfp4D4NYrumi+LQgQL/68HvJO+q6XtuHSDvw6Aqov7sCAhjbNq3bUPgPqbdvjXC5HeB2oEAPg==", - "dev": true, - "dependencies": { - "@types/history": "*", - "@types/react": "*" - } - }, - "node_modules/@types/react-router-dom": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/@types/react-router-dom/-/react-router-dom-5.1.5.tgz", - "integrity": "sha512-ArBM4B1g3BWLGbaGvwBGO75GNFbLDUthrDojV2vHLih/Tq8M+tgvY1DSwkuNrPSwdp/GUL93WSEpTZs8nVyJLw==", - "dev": true, - "dependencies": { - "@types/history": "*", - "@types/react": "*", - "@types/react-router": "*" - } - }, "node_modules/@types/react-select": { "version": "4.0.16", "resolved": "https://registry.npmjs.org/@types/react-select/-/react-select-4.0.16.tgz", @@ -22384,9 +22388,9 @@ } }, "node_modules/dom-accessibility-api": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.4.tgz", - "integrity": "sha512-TvrjBckDy2c6v6RLxPv5QXOnU+SmF9nBII5621Ve5fu6Z/BDrENurBEvlC1f44lKEUVqOpK4w9E5Idc5/EgkLQ==", + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.10.tgz", + "integrity": "sha512-Xu9mD0UjrJisTmv7lmVSDMagQcU9R5hwAbxsaAE/35XPnPLJobbuREfV/rraiSaEj/UOvgrzQs66zyTWTlyd+g==", "dev": true }, "node_modules/dom-converter": { @@ -26288,16 +26292,11 @@ } }, "node_modules/history": { - "version": "4.10.1", - "resolved": 
"https://registry.npmjs.org/history/-/history-4.10.1.tgz", - "integrity": "sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/history/-/history-5.1.0.tgz", + "integrity": "sha512-zPuQgPacm2vH2xdORvGGz1wQMuHSIB56yNAy5FnLuwOwgSYyPKptJtcMm6Ev+hRGeS+GzhbmRacHzvlESbFwDg==", "dependencies": { - "@babel/runtime": "^7.1.2", - "loose-envify": "^1.2.0", - "resolve-pathname": "^3.0.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0", - "value-equal": "^1.0.1" + "@babel/runtime": "^7.7.6" } }, "node_modules/hmac-drbg": { @@ -33999,19 +33998,6 @@ "node": ">=4" } }, - "node_modules/mini-create-react-context": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/mini-create-react-context/-/mini-create-react-context-0.4.0.tgz", - "integrity": "sha512-b0TytUgFSbgFJGzJqXPKCFCBWigAjpjo+Fl7Vf7ZbKRDptszpppKxXH6DRXEABZ/gcEQczeb0iZ7JvL8e8jjCA==", - "dependencies": { - "@babel/runtime": "^7.5.5", - "tiny-warning": "^1.0.3" - }, - "peerDependencies": { - "prop-types": "^15.0.0", - "react": "^0.14.0 || ^15.0.0 || ^16.0.0" - } - }, "node_modules/mini-css-extract-plugin": { "version": "0.11.3", "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-0.11.3.tgz", @@ -35403,19 +35389,6 @@ "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" }, - "node_modules/path-to-regexp": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", - "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==", - "dependencies": { - "isarray": "0.0.1" - } - }, - "node_modules/path-to-regexp/node_modules/isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" - }, "node_modules/path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", @@ -38580,41 +38553,28 @@ "react-dom": "^16.0.0 || ^17.0.0" } }, - "node_modules/react-router": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-5.2.0.tgz", - "integrity": "sha512-smz1DUuFHRKdcJC0jobGo8cVbhO3x50tCL4icacOlcwDOEQPq4TMqwx3sY1TP+DvtTgz4nm3thuo7A+BK2U0Dw==", - "dependencies": { - "@babel/runtime": "^7.1.2", - "history": "^4.9.0", - "hoist-non-react-statics": "^3.1.0", - "loose-envify": "^1.3.1", - "mini-create-react-context": "^0.4.0", - "path-to-regexp": "^1.7.0", - "prop-types": "^15.6.2", - "react-is": "^16.6.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0" + "node_modules/react-router-dom": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.1.1.tgz", + "integrity": "sha512-O3UH89DI4o+swd2q6lF4dSmpuNCxwkUXcj0zAFcVc1H+YoPE6T7uwoFMX0ws1pUvCY8lYDucFpOqCCdal6VFzg==", + "dependencies": { + "history": "^5.1.0", + "react-router": "6.1.1" }, "peerDependencies": { - "react": ">=15" + "react": ">=16.8", + "react-dom": ">=16.8" } }, - "node_modules/react-router-dom": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-5.2.0.tgz", - "integrity": "sha512-gxAmfylo2QUjcwxI63RhQ5G85Qqt4voZpUXSEqCwykV0baaOTQDR1f0PmY8AELqIyVc0NEZUj0Gov5lNGcXgsA==", + "node_modules/react-router-dom/node_modules/react-router": 
{ + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.1.1.tgz", + "integrity": "sha512-55o96RiDZmC0uD17DPqVmzzfdNd2Dc+EjkYvMAmHl43du/GItaTdFr5WwjTryNWPXZ+OOVQxQhwAX25UwxpHtw==", "dependencies": { - "@babel/runtime": "^7.1.2", - "history": "^4.9.0", - "loose-envify": "^1.3.1", - "prop-types": "^15.6.2", - "react-router": "5.2.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0" + "history": "^5.1.0" }, "peerDependencies": { - "react": ">=15" + "react": ">=16.8" } }, "node_modules/react-scripts": { @@ -38780,6 +38740,15 @@ "react-dom": "^16.8.0 || ^17.0.0" } }, + "node_modules/react-select-event": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/react-select-event/-/react-select-event-5.3.0.tgz", + "integrity": "sha512-Novkl7X9JJKmDV5LyYaKwl0vffWtqPrBa1vuI0v43P/f87mSA7JfdYxU93SFb99RssphVzBSIAbcnbX1w21QIQ==", + "dev": true, + "dependencies": { + "@testing-library/dom": ">=7" + } + }, "node_modules/react-select/node_modules/@babel/runtime": { "version": "7.14.6", "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.14.6.tgz", @@ -40433,11 +40402,6 @@ "node": ">=4" } }, - "node_modules/resolve-pathname": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-3.0.0.tgz", - "integrity": "sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==" - }, "node_modules/resolve-url": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", @@ -43559,11 +43523,6 @@ "integrity": "sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q=", "dev": true }, - "node_modules/tiny-invariant": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.1.0.tgz", - "integrity": "sha512-ytxQvrb1cPc9WBEI/HSeYYoGD0kWnGEOR8RY6KomWLBVhqz0RgTwVO9dLrGz7dC+nN9llyI7OKAgRq8Vq4ZBSw==" - }, "node_modules/tiny-warning": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", @@ -44677,11 +44636,6 @@ "spdx-expression-parse": "^3.0.0" } }, - "node_modules/value-equal": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz", - "integrity": "sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw==" - }, "node_modules/vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", @@ -59174,19 +59128,19 @@ } }, "@testing-library/dom": { - "version": "7.30.3", - "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-7.30.3.tgz", - "integrity": "sha512-7JhIg2MW6WPwyikH2iL3o7z+FTVgSOd2jqCwTAHqK7Qal2gRRYiUQyURAxtbK9VXm/UTyG9bRihv8C5Tznr2zw==", + "version": "8.11.1", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-8.11.1.tgz", + "integrity": "sha512-3KQDyx9r0RKYailW2MiYrSSKEfH0GTkI51UGEvJenvcoDoeRYs0PZpi2SXqtnMClQvCqdtTTpOfFETDTVADpAg==", "dev": true, "requires": { "@babel/code-frame": "^7.10.4", "@babel/runtime": "^7.12.5", "@types/aria-query": "^4.2.0", - "aria-query": "^4.2.2", + "aria-query": "^5.0.0", "chalk": "^4.1.0", - "dom-accessibility-api": "^0.5.4", + "dom-accessibility-api": "^0.5.9", "lz-string": "^1.4.4", - "pretty-format": "^26.6.2" + "pretty-format": "^27.0.2" }, "dependencies": { "@babel/runtime": { @@ -59199,40 +59153,40 @@ } }, "@jest/types": { - "version": "26.6.2", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-26.6.2.tgz", - "integrity": 
"sha512-fC6QCp7Sc5sX6g8Tvbmj4XUTbyrik0akgRy03yjXbQaBWWNWGE7SGtJk98m0N8nzegD/7SggrUlivxo5ax4KWQ==", + "version": "27.4.2", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-27.4.2.tgz", + "integrity": "sha512-j35yw0PMTPpZsUoOBiuHzr1zTYoad1cVIE0ajEjcrJONxxrko/IRGKkXx3os0Nsi4Hu3+5VmDbVfq5WhG/pWAg==", "dev": true, "requires": { "@types/istanbul-lib-coverage": "^2.0.0", "@types/istanbul-reports": "^3.0.0", "@types/node": "*", - "@types/yargs": "^15.0.0", + "@types/yargs": "^16.0.0", "chalk": "^4.0.0" } }, "@types/istanbul-reports": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.0.tgz", - "integrity": "sha512-nwKNbvnwJ2/mndE9ItP/zc2TCzw6uuodnF4EHYWD+gCQDVBuRQL5UzbZD0/ezy1iKsFU2ZQiDqg4M9dN4+wZgA==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz", + "integrity": "sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==", "dev": true, "requires": { "@types/istanbul-lib-report": "*" } }, "@types/yargs": { - "version": "15.0.13", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.13.tgz", - "integrity": "sha512-kQ5JNTrbDv3Rp5X2n/iUu37IJBDU2gsZ5R/g1/KHOOEc5IKfUFjXT6DENPGduh08I/pamwtEq4oul7gUqKTQDQ==", + "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.4.tgz", + "integrity": "sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw==", "dev": true, "requires": { "@types/yargs-parser": "*" } }, "ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "dev": true }, "ansi-styles": { @@ -59244,10 +59198,16 @@ "color-convert": "^2.0.1" } }, + "aria-query": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.0.0.tgz", + "integrity": "sha512-V+SM7AbUwJ+EBnB8+DXs0hPZHO0W6pqBcc0dW90OwtVG02PswOu/teuARoLQjdDOH+t9pJgGnW5/Qmouf3gPJg==", + "dev": true + }, "chalk": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -59276,15 +59236,23 @@ "dev": true }, "pretty-format": { - "version": "26.6.2", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-26.6.2.tgz", - "integrity": "sha512-7AeGuCYNGmycyQbCqd/3PWH4eOoX/OiCa0uphp57NVTeAGdJGaAliecxwBDHYQCIvrW7aDBZCYeNTP/WX69mkg==", + "version": "27.4.2", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.4.2.tgz", + "integrity": "sha512-p0wNtJ9oLuvgOQDEIZ9zQjZffK7KtyR6Si0jnXULIDwrlNF8Cuir3AZP0hHv0jmKuNN/edOnbMjnzd4uTcmWiw==", "dev": true, "requires": { - "@jest/types": "^26.6.2", - "ansi-regex": "^5.0.0", - "ansi-styles": "^4.0.0", + "@jest/types": "^27.4.2", + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", "react-is": "^17.0.1" 
+ }, + "dependencies": { + "ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true + } } }, "react-is": { @@ -59305,17 +59273,18 @@ } }, "@testing-library/jest-dom": { - "version": "5.11.10", - "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-5.11.10.tgz", - "integrity": "sha512-FuKiq5xuk44Fqm0000Z9w0hjOdwZRNzgx7xGGxQYepWFZy+OYUMOT/wPI4nLYXCaVltNVpU1W/qmD88wLWDsqQ==", + "version": "5.16.1", + "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-5.16.1.tgz", + "integrity": "sha512-ajUJdfDIuTCadB79ukO+0l8O+QwN0LiSxDaYUTI4LndbbUsGi6rWU1SCexXzBA2NSjlVB9/vbkasQIL3tmPBjw==", "dev": true, "requires": { "@babel/runtime": "^7.9.2", "@types/testing-library__jest-dom": "^5.9.1", - "aria-query": "^4.2.2", + "aria-query": "^5.0.0", "chalk": "^3.0.0", "css": "^3.0.0", "css.escape": "^1.5.1", + "dom-accessibility-api": "^0.5.6", "lodash": "^4.17.15", "redent": "^3.0.0" }, @@ -59329,6 +59298,12 @@ "color-convert": "^2.0.1" } }, + "aria-query": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.0.0.tgz", + "integrity": "sha512-V+SM7AbUwJ+EBnB8+DXs0hPZHO0W6pqBcc0dW90OwtVG02PswOu/teuARoLQjdDOH+t9pJgGnW5/Qmouf3gPJg==", + "dev": true + }, "chalk": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", @@ -59399,13 +59374,13 @@ } }, "@testing-library/react": { - "version": "11.2.6", - "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-11.2.6.tgz", - "integrity": "sha512-TXMCg0jT8xmuU8BkKMtp8l7Z50Ykew5WNX8UoIKTaLFwKkP2+1YDhOLA2Ga3wY4x29jyntk7EWfum0kjlYiSjQ==", + "version": "12.1.2", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-12.1.2.tgz", + "integrity": "sha512-ihQiEOklNyHIpo2Y8FREkyD1QAea054U0MVbwH1m8N9TxeFz+KoJ9LkqoKqJlzx2JDm56DVwaJ1r36JYxZM05g==", "dev": true, "requires": { "@babel/runtime": "^7.12.5", - "@testing-library/dom": "^7.28.1" + "@testing-library/dom": "^8.0.0" }, "dependencies": { "@babel/runtime": { @@ -59420,9 +59395,9 @@ } }, "@testing-library/react-hooks": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/@testing-library/react-hooks/-/react-hooks-7.0.1.tgz", - "integrity": "sha512-bpEQ2SHSBSzBmfJ437NmnP+oArQ7aVmmULiAp6Ag2rtyLBLPNFSMmgltUbFGmQOJdPWo4Ub31kpUC5T46zXNwQ==", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/@testing-library/react-hooks/-/react-hooks-7.0.2.tgz", + "integrity": "sha512-dYxpz8u9m4q1TuzfcUApqi8iFfR6R0FaMbr2hjZJy1uC8z+bO/K4v8Gs9eogGKYQop7QsrBTFkv/BCF7MzD2Cg==", "dev": true, "requires": { "@babel/runtime": "^7.12.5", @@ -59444,9 +59419,9 @@ } }, "@testing-library/user-event": { - "version": "12.8.3", - "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-12.8.3.tgz", - "integrity": "sha512-IR0iWbFkgd56Bu5ZI/ej8yQwrkCv8Qydx6RzwbKz9faXazR/+5tvYKsZQgyXJiwgpcva127YO6JcWy7YlCfofQ==", + "version": "13.5.0", + "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-13.5.0.tgz", + "integrity": "sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg==", "dev": true, "requires": { "@babel/runtime": "^7.12.5" @@ -59638,12 +59613,6 @@ "@types/unist": "*" } }, - "@types/history": { - "version": "4.7.7", - "resolved": "https://registry.npmjs.org/@types/history/-/history-4.7.7.tgz", - "integrity": 
"sha512-2xtoL22/3Mv6a70i4+4RB7VgbDDORoWwjcqeNysojZA0R7NK17RbY5Gof/2QiFfJgX+KkWghbwJ+d/2SB8Ndzg==", - "dev": true - }, "@types/hoist-non-react-statics": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.1.tgz", @@ -59924,27 +59893,6 @@ "@types/react": "*" } }, - "@types/react-router": { - "version": "5.1.8", - "resolved": "https://registry.npmjs.org/@types/react-router/-/react-router-5.1.8.tgz", - "integrity": "sha512-HzOyJb+wFmyEhyfp4D4NYrumi+LQgQL/68HvJO+q6XtuHSDvw6Aqov7sCAhjbNq3bUPgPqbdvjXC5HeB2oEAPg==", - "dev": true, - "requires": { - "@types/history": "*", - "@types/react": "*" - } - }, - "@types/react-router-dom": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/@types/react-router-dom/-/react-router-dom-5.1.5.tgz", - "integrity": "sha512-ArBM4B1g3BWLGbaGvwBGO75GNFbLDUthrDojV2vHLih/Tq8M+tgvY1DSwkuNrPSwdp/GUL93WSEpTZs8nVyJLw==", - "dev": true, - "requires": { - "@types/history": "*", - "@types/react": "*", - "@types/react-router": "*" - } - }, "@types/react-select": { "version": "4.0.16", "resolved": "https://registry.npmjs.org/@types/react-select/-/react-select-4.0.16.tgz", @@ -64555,9 +64503,9 @@ } }, "dom-accessibility-api": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.4.tgz", - "integrity": "sha512-TvrjBckDy2c6v6RLxPv5QXOnU+SmF9nBII5621Ve5fu6Z/BDrENurBEvlC1f44lKEUVqOpK4w9E5Idc5/EgkLQ==", + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.10.tgz", + "integrity": "sha512-Xu9mD0UjrJisTmv7lmVSDMagQcU9R5hwAbxsaAE/35XPnPLJobbuREfV/rraiSaEj/UOvgrzQs66zyTWTlyd+g==", "dev": true }, "dom-converter": { @@ -67699,16 +67647,11 @@ "dev": true }, "history": { - "version": "4.10.1", - "resolved": "https://registry.npmjs.org/history/-/history-4.10.1.tgz", - "integrity": "sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/history/-/history-5.1.0.tgz", + "integrity": "sha512-zPuQgPacm2vH2xdORvGGz1wQMuHSIB56yNAy5FnLuwOwgSYyPKptJtcMm6Ev+hRGeS+GzhbmRacHzvlESbFwDg==", "requires": { - "@babel/runtime": "^7.1.2", - "loose-envify": "^1.2.0", - "resolve-pathname": "^3.0.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0", - "value-equal": "^1.0.1" + "@babel/runtime": "^7.7.6" } }, "hmac-drbg": { @@ -73568,15 +73511,6 @@ "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", "dev": true }, - "mini-create-react-context": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/mini-create-react-context/-/mini-create-react-context-0.4.0.tgz", - "integrity": "sha512-b0TytUgFSbgFJGzJqXPKCFCBWigAjpjo+Fl7Vf7ZbKRDptszpppKxXH6DRXEABZ/gcEQczeb0iZ7JvL8e8jjCA==", - "requires": { - "@babel/runtime": "^7.5.5", - "tiny-warning": "^1.0.3" - } - }, "mini-css-extract-plugin": { "version": "0.11.3", "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-0.11.3.tgz", @@ -74720,21 +74654,6 @@ "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" }, - "path-to-regexp": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", - "integrity": 
"sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==", - "requires": { - "isarray": "0.0.1" - }, - "dependencies": { - "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" - } - } - }, "path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", @@ -77260,35 +77179,23 @@ "resize-observer-polyfill": "^1.5.1" } }, - "react-router": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-5.2.0.tgz", - "integrity": "sha512-smz1DUuFHRKdcJC0jobGo8cVbhO3x50tCL4icacOlcwDOEQPq4TMqwx3sY1TP+DvtTgz4nm3thuo7A+BK2U0Dw==", - "requires": { - "@babel/runtime": "^7.1.2", - "history": "^4.9.0", - "hoist-non-react-statics": "^3.1.0", - "loose-envify": "^1.3.1", - "mini-create-react-context": "^0.4.0", - "path-to-regexp": "^1.7.0", - "prop-types": "^15.6.2", - "react-is": "^16.6.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0" - } - }, "react-router-dom": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-5.2.0.tgz", - "integrity": "sha512-gxAmfylo2QUjcwxI63RhQ5G85Qqt4voZpUXSEqCwykV0baaOTQDR1f0PmY8AELqIyVc0NEZUj0Gov5lNGcXgsA==", + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.1.1.tgz", + "integrity": "sha512-O3UH89DI4o+swd2q6lF4dSmpuNCxwkUXcj0zAFcVc1H+YoPE6T7uwoFMX0ws1pUvCY8lYDucFpOqCCdal6VFzg==", "requires": { - "@babel/runtime": "^7.1.2", - "history": "^4.9.0", - "loose-envify": "^1.3.1", - "prop-types": "^15.6.2", - "react-router": "5.2.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0" + "history": "^5.1.0", + "react-router": "6.1.1" + }, + "dependencies": { + "react-router": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.1.1.tgz", + "integrity": "sha512-55o96RiDZmC0uD17DPqVmzzfdNd2Dc+EjkYvMAmHl43du/GItaTdFr5WwjTryNWPXZ+OOVQxQhwAX25UwxpHtw==", + "requires": { + "history": "^5.1.0" + } + } } }, "react-scripts": { @@ -77475,6 +77382,15 @@ } } }, + "react-select-event": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/react-select-event/-/react-select-event-5.3.0.tgz", + "integrity": "sha512-Novkl7X9JJKmDV5LyYaKwl0vffWtqPrBa1vuI0v43P/f87mSA7JfdYxU93SFb99RssphVzBSIAbcnbX1w21QIQ==", + "dev": true, + "requires": { + "@testing-library/dom": ">=7" + } + }, "react-side-effect": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/react-side-effect/-/react-side-effect-2.1.1.tgz", @@ -78679,11 +78595,6 @@ "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true }, - "resolve-pathname": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-3.0.0.tgz", - "integrity": "sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==" - }, "resolve-url": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", @@ -81182,11 +81093,6 @@ "integrity": "sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q=", "dev": true }, - "tiny-invariant": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.1.0.tgz", - "integrity": "sha512-ytxQvrb1cPc9WBEI/HSeYYoGD0kWnGEOR8RY6KomWLBVhqz0RgTwVO9dLrGz7dC+nN9llyI7OKAgRq8Vq4ZBSw==" - }, "tiny-warning": { "version": "1.0.3", "resolved": 
"https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", @@ -82024,11 +81930,6 @@ "spdx-expression-parse": "^3.0.0" } }, - "value-equal": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz", - "integrity": "sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw==" - }, "vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index c42d49ba0b5c..987a0b1edaf1 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.33.12-alpha", + "version": "0.34.1-alpha", "private": true, "scripts": { "start": "react-scripts start", @@ -35,7 +35,7 @@ "react-markdown": "^7.0.1", "react-pose": "^4.0.10", "react-query": "^3.19.1", - "react-router-dom": "^5.1.2", + "react-router-dom": "6.1.1", "react-select": "^4.3.1", "react-table": "^7.5.0", "react-use": "^15.3.8", @@ -55,10 +55,10 @@ "@storybook/preset-create-react-app": "^3.2.0", "@storybook/react": "^6.3.2", "@storybook/theming": "^6.3.8", - "@testing-library/jest-dom": "^5.11.4", - "@testing-library/react": "^11.1.0", - "@testing-library/react-hooks": "^7.0.1", - "@testing-library/user-event": "^12.1.10", + "@testing-library/jest-dom": "5.16.1", + "@testing-library/react": "12.1.2", + "@testing-library/react-hooks": "^7.0.2", + "@testing-library/user-event": "^13.5.0", "@types/flat": "^5.0.1", "@types/jest": "^24.0.0", "@types/json-schema": "^7.0.6", @@ -69,7 +69,6 @@ "@types/react-dom": "17.0.1", "@types/react-helmet": "6.1.0", "@types/react-lazylog": "^4.5.0", - "@types/react-router-dom": "^5.1.3", "@types/react-select": "^4.0.16", "@types/react-table": "^7.0.12", "@types/react-widgets": "4.4.4", @@ -84,6 +83,7 @@ "lint-staged": "^10.0.8", "prettier": "^2.2.1", "react-scripts": "4.0.2", + "react-select-event": "^5.3.0", "storybook-addon-styled-component-theme": "^2.0.0", "tar": "^6.1.11", "tmpl": "^1.0.5", diff --git a/airbyte-webapp/src/App.tsx b/airbyte-webapp/src/App.tsx index dc295a8a282c..210f90705509 100644 --- a/airbyte-webapp/src/App.tsx +++ b/airbyte-webapp/src/App.tsx @@ -3,6 +3,7 @@ import { ThemeProvider } from "styled-components"; import { IntlProvider } from "react-intl"; import { CacheProvider } from "rest-hooks"; import { QueryClient, QueryClientProvider } from "react-query"; +import { BrowserRouter as Router } from "react-router-dom"; import en from "./locales/en.json"; import GlobalStyle from "./global-styles"; @@ -13,11 +14,9 @@ import LoadingPage from "./components/LoadingPage"; import ApiErrorBoundary from "./components/ApiErrorBoundary"; import NotificationService from "hooks/services/Notification"; import { AnalyticsProvider } from "views/common/AnalyticsProvider"; -import { usePickFirstWorkspace } from "hooks/services/useWorkspace"; import { FeatureService } from "hooks/services/Feature"; -import { OnboardingServiceProvider } from "hooks/services/Onboarding"; import { ServicesProvider } from "core/servicesProvider"; -import { useApiServices } from "core/defaultServices"; +import { ApiServices } from "core/ApiServices"; import { Config, ConfigServiceProvider, @@ -26,6 +25,7 @@ import { ValueProvider, windowConfigProvider, } from "./config"; +import { WorkspaceServiceProvider } from "./services/workspaces/WorkspacesService"; const StyleProvider: React.FC = ({ children }) => ( @@ -40,7 +40,13 @@ const I18NProvider: React.FC = ({ children }) => ( ); 
-const queryClient = new QueryClient(); +const queryClient = new QueryClient({ + defaultOptions: { + queries: { + suspense: true, + }, + }, +}); const StoreProvider: React.FC = ({ children }) => ( @@ -53,47 +59,40 @@ const configProviders: ValueProvider = [ windowConfigProvider, ]; -const services = { - currentWorkspaceProvider: usePickFirstWorkspace, -}; - -const AppServices: React.FC = ({ children }) => ( - - {children} - +const Services: React.FC = ({ children }) => ( + + + + + + {children} + + + + + ); -const ServiceOverrides: React.FC = React.memo(({ children }) => { - useApiServices(); - return <>{children}; -}); - const App: React.FC = () => { return ( - }> - - - - - - - - - - - - - - - - + + }> + + + + + + + + + diff --git a/airbyte-webapp/src/components/ApiErrorBoundary/ApiErrorBoundary.tsx b/airbyte-webapp/src/components/ApiErrorBoundary/ApiErrorBoundary.tsx index 8f5aae2193d7..40bb5d5f0e97 100644 --- a/airbyte-webapp/src/components/ApiErrorBoundary/ApiErrorBoundary.tsx +++ b/airbyte-webapp/src/components/ApiErrorBoundary/ApiErrorBoundary.tsx @@ -57,7 +57,7 @@ class ApiErrorBoundary extends React.Component { return !this.state.errorId ? ( this.props.children ) : ( - + ); } } diff --git a/airbyte-webapp/src/components/CreateConnectionContent/CreateConnectionContent.tsx b/airbyte-webapp/src/components/CreateConnectionContent/CreateConnectionContent.tsx index 149c053e2021..c655d810aa31 100644 --- a/airbyte-webapp/src/components/CreateConnectionContent/CreateConnectionContent.tsx +++ b/airbyte-webapp/src/components/CreateConnectionContent/CreateConnectionContent.tsx @@ -11,8 +11,6 @@ import ContentCard from "components/ContentCard"; import { JobsLogItem } from "components/JobItem"; import ConnectionForm from "views/Connection/ConnectionForm"; import TryAfterErrorBlock from "./components/TryAfterErrorBlock"; -import { Source } from "core/resources/Source"; -import { Destination } from "core/resources/Destination"; import useConnection, { ValuesProps } from "hooks/services/useConnectionHook"; import { useDiscoverSchema } from "hooks/services/useSchemaHook"; @@ -20,6 +18,7 @@ import SourceDefinitionResource from "core/resources/SourceDefinition"; import DestinationDefinitionResource from "core/resources/DestinationDefinition"; import { IDataItem } from "components/base/DropDown/components/Option"; import { useAnalyticsService } from "hooks/services/Analytics/useAnalyticsService"; +import { Destination, Source } from "core/domain/connector"; const SkipButton = styled.div` margin-top: 6px; diff --git a/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.tsx b/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.tsx index e524ac2104a3..7e72bfae83c8 100644 --- a/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.tsx +++ b/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.tsx @@ -2,8 +2,10 @@ import React from "react"; import styled from "styled-components"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { faCog } from "@fortawesome/free-solid-svg-icons"; + import useRouter from "hooks/useRouter"; -import { Routes } from "../../../pages/routes"; +import { RoutePaths } from "pages/routes"; +import { useCurrentWorkspace } from "hooks/services/useWorkspace"; type IProps = { id: string; @@ -28,10 +30,14 @@ const Icon = styled(FontAwesomeIcon)` const ConnectorCell: React.FC = ({ id }) => { const { push } = useRouter(); + const { workspaceId } = useCurrentWorkspace(); const 
openSettings = (event: React.MouseEvent) => { event.stopPropagation(); - push(`${Routes.Connections}/${id}${Routes.Settings}`); + // TODO: Replace with link instead of push + push( + `/${workspaceId}/${RoutePaths.Connections}/${id}/${RoutePaths.Settings}` + ); }; return ( diff --git a/airbyte-webapp/src/components/EntityTable/utils.tsx b/airbyte-webapp/src/components/EntityTable/utils.tsx index 00e423af9e61..e78c2f1d0e02 100644 --- a/airbyte-webapp/src/components/EntityTable/utils.tsx +++ b/airbyte-webapp/src/components/EntityTable/utils.tsx @@ -1,14 +1,16 @@ import { Connection } from "core/resources/Connection"; -import { Source } from "core/resources/Source"; -import { Destination } from "core/resources/Destination"; import Status from "core/statuses"; import { ITableDataItem, EntityTableDataItem, Status as ConnectionStatus, } from "./types"; -import { SourceDefinition } from "core/resources/SourceDefinition"; -import { DestinationDefinition } from "core/resources/DestinationDefinition"; +import { + Destination, + DestinationDefinition, + Source, + SourceDefinition, +} from "core/domain/connector"; // TODO: types in next methods look a bit ugly export function getEntityTableData< diff --git a/airbyte-webapp/src/components/SideMenu/SideMenu.tsx b/airbyte-webapp/src/components/SideMenu/SideMenu.tsx index 9b84e0a91339..d570df4b214e 100644 --- a/airbyte-webapp/src/components/SideMenu/SideMenu.tsx +++ b/airbyte-webapp/src/components/SideMenu/SideMenu.tsx @@ -50,7 +50,7 @@ const SideMenu: React.FC = ({ data, onSelect, activeItem }) => { onSelect(route.path)} /> diff --git a/airbyte-webapp/src/config/ConfigServiceProvider.tsx b/airbyte-webapp/src/config/ConfigServiceProvider.tsx index eebe14fd50fa..da0e865ab4fd 100644 --- a/airbyte-webapp/src/config/ConfigServiceProvider.tsx +++ b/airbyte-webapp/src/config/ConfigServiceProvider.tsx @@ -37,14 +37,17 @@ const ConfigServiceInner: React.FC<{ () => (value ? { config: value } : null), [value] ); + + if (loading) { + return ; + } + return ( - - {loading && providers ? 
: children} - + {children} ); }; export const ConfigServiceProvider: React.FC<{ defaultConfig: Config; - providers: ValueProvider; + providers?: ValueProvider; }> = React.memo(ConfigServiceInner); diff --git a/airbyte-webapp/src/constants/constants.ts b/airbyte-webapp/src/constants/constants.ts deleted file mode 100644 index c5a65e6eac96..000000000000 --- a/airbyte-webapp/src/constants/constants.ts +++ /dev/null @@ -1,3 +0,0 @@ -export abstract class Constants { - static readonly DEV_IMAGE_TAG: string = "dev"; -} diff --git a/airbyte-webapp/src/core/defaultServices.tsx b/airbyte-webapp/src/core/ApiServices.tsx similarity index 89% rename from airbyte-webapp/src/core/defaultServices.tsx rename to airbyte-webapp/src/core/ApiServices.tsx index 3c3da6f45643..36ca5e8315d5 100644 --- a/airbyte-webapp/src/core/defaultServices.tsx +++ b/airbyte-webapp/src/core/ApiServices.tsx @@ -1,4 +1,4 @@ -import { useEffect, useMemo } from "react"; +import React, { useEffect, useMemo } from "react"; import { useConfig } from "config"; import { RequestMiddleware } from "./request/RequestMiddleware"; @@ -9,7 +9,7 @@ import { OperationService } from "./domain/connection"; import { HealthService } from "./health/HealthService"; import { useGetService, useInjectServices } from "./servicesProvider"; -export const useApiServices = (): void => { +export const ApiServices: React.FC = React.memo(({ children }) => { const config = useConfig(); const middlewares = useGetService( "DefaultRequestMiddlewares" @@ -37,4 +37,6 @@ export const useApiServices = (): void => { ); useInjectServices(services); -}; + + return <>{children}; +}); diff --git a/airbyte-webapp/src/core/domain/connection/types.ts b/airbyte-webapp/src/core/domain/connection/types.ts index 407c18ea9475..ad5de9b3c044 100644 --- a/airbyte-webapp/src/core/domain/connection/types.ts +++ b/airbyte-webapp/src/core/domain/connection/types.ts @@ -1,8 +1,7 @@ import { SyncSchema } from "core/domain/catalog"; -import { Source } from "core/resources/Source"; -import { Destination } from "core/resources/Destination"; import { Operation } from "./operation"; import { AirbyteJSONSchema } from "core/jsonSchema"; +import { Destination, Source } from "../connector"; type ConnectionConfiguration = unknown; diff --git a/airbyte-webapp/src/core/domain/connector/DestinationDefinitionService.ts b/airbyte-webapp/src/core/domain/connector/DestinationDefinitionService.ts index 3fdf11c5561d..3cc8690f9447 100644 --- a/airbyte-webapp/src/core/domain/connector/DestinationDefinitionService.ts +++ b/airbyte-webapp/src/core/domain/connector/DestinationDefinitionService.ts @@ -1,5 +1,5 @@ import { AirbyteRequestService } from "core/request/AirbyteRequestService"; -import { DestinationDefinition } from "core/resources/DestinationDefinition"; +import { DestinationDefinition } from "./types"; class DestinationDefinitionService extends AirbyteRequestService { get url(): string { diff --git a/airbyte-webapp/src/core/domain/connector/SourceDefinitionService.ts b/airbyte-webapp/src/core/domain/connector/SourceDefinitionService.ts index d26ce05af55c..20ba570fd814 100644 --- a/airbyte-webapp/src/core/domain/connector/SourceDefinitionService.ts +++ b/airbyte-webapp/src/core/domain/connector/SourceDefinitionService.ts @@ -1,5 +1,5 @@ import { AirbyteRequestService } from "core/request/AirbyteRequestService"; -import { SourceDefinition } from "core/resources/SourceDefinition"; +import { SourceDefinition } from "./types"; class SourceDefinitionService extends AirbyteRequestService { get url(): string { 
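The constants.ts deletion above and the connector.ts hunk that follows trade a class used purely as a constants namespace for a plain exported module constant. A minimal sketch of the pattern using the names from the diff; the isDevImage helper is hypothetical and stands in for the real call site in Connector.hasNewerVersion:

// Before: an abstract class acting only as a namespace for one value.
abstract class Constants {
  static readonly DEV_IMAGE_TAG: string = "dev";
}

// After: a plain module constant, importable without pulling in a class.
export const DEV_IMAGE_TAG = "dev";

// Hypothetical call site: "dev"-tagged connector images always count as
// having a newer version available.
export function isDevImage(dockerImageTag: string): boolean {
  return dockerImageTag === DEV_IMAGE_TAG;
}

A plain const also tree-shakes cleanly and keeps imports limited to the one value a caller actually reads.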
diff --git a/airbyte-webapp/src/core/domain/connector/connector.ts b/airbyte-webapp/src/core/domain/connector/connector.ts index b9ac5eb6742a..4bf94652c975 100644 --- a/airbyte-webapp/src/core/domain/connector/connector.ts +++ b/airbyte-webapp/src/core/domain/connector/connector.ts @@ -1,6 +1,6 @@ import { isSourceDefinition, isSourceDefinitionSpecification } from "./source"; import { ConnectorDefinition, ConnectorDefinitionSpecification } from "./types"; -import { Constants } from "constants/constants"; +import { DEV_IMAGE_TAG } from "./constants"; export class Connector { static id(connector: ConnectorDefinition): string { @@ -17,7 +17,7 @@ export class Connector { return ( (!Connector.isDeprecated(connector) && connector.latestDockerImageTag !== connector.dockerImageTag) || - connector.dockerImageTag === Constants.DEV_IMAGE_TAG + connector.dockerImageTag === DEV_IMAGE_TAG ); } } diff --git a/airbyte-webapp/src/core/domain/connector/constants.ts b/airbyte-webapp/src/core/domain/connector/constants.ts new file mode 100644 index 000000000000..13d65539fbfd --- /dev/null +++ b/airbyte-webapp/src/core/domain/connector/constants.ts @@ -0,0 +1 @@ +export const DEV_IMAGE_TAG = "dev"; diff --git a/airbyte-webapp/src/core/domain/connector/destination.ts b/airbyte-webapp/src/core/domain/connector/destination.ts index a1af20770f90..b8cf2fa50fa5 100644 --- a/airbyte-webapp/src/core/domain/connector/destination.ts +++ b/airbyte-webapp/src/core/domain/connector/destination.ts @@ -1,5 +1,4 @@ -import { ConnectorDefinition } from "./types"; -import { DestinationDefinition } from "../../resources/DestinationDefinition"; +import { ConnectorDefinition, DestinationDefinition } from "./types"; export function isDestinationDefinition( connector: ConnectorDefinition diff --git a/airbyte-webapp/src/core/domain/connector/source.ts b/airbyte-webapp/src/core/domain/connector/source.ts index bfb71ec10b4b..cfabbf64e21d 100644 --- a/airbyte-webapp/src/core/domain/connector/source.ts +++ b/airbyte-webapp/src/core/domain/connector/source.ts @@ -1,7 +1,7 @@ -import { SourceDefinition } from "core/resources/SourceDefinition"; import { ConnectorDefinition, ConnectorDefinitionSpecification, + SourceDefinition, SourceDefinitionSpecification, } from "./types"; diff --git a/airbyte-webapp/src/core/domain/connector/types.ts b/airbyte-webapp/src/core/domain/connector/types.ts index d87ae1ddcb2e..e7809dc39663 100644 --- a/airbyte-webapp/src/core/domain/connector/types.ts +++ b/airbyte-webapp/src/core/domain/connector/types.ts @@ -1,7 +1,28 @@ -import { ConnectionSpecification } from "core/domain/connection"; +import { + ConnectionConfiguration, + ConnectionSpecification, +} from "core/domain/connection"; import { DestinationSyncMode } from "core/domain/catalog"; -import { SourceDefinition } from "core/resources/SourceDefinition"; -import { DestinationDefinition } from "core/resources/DestinationDefinition"; + +export interface DestinationDefinition { + destinationDefinitionId: string; + name: string; + dockerRepository: string; + dockerImageTag: string; + latestDockerImageTag: string; + documentationUrl: string; + icon: string; +} + +export interface SourceDefinition { + sourceDefinitionId: string; + name: string; + dockerRepository: string; + dockerImageTag: string; + latestDockerImageTag: string; + documentationUrl: string; + icon: string; +} export type ConnectorDefinition = SourceDefinition | DestinationDefinition; @@ -71,3 +92,21 @@ export interface DestinationGetConsentPayload { workspaceId: string; oAuthInputConfiguration: 
Record; } + +export interface Source { + sourceId: string; + name: string; + sourceName: string; + workspaceId: string; + sourceDefinitionId: string; + connectionConfiguration: ConnectionConfiguration; +} + +export interface Destination { + destinationId: string; + name: string; + destinationName: string; + workspaceId: string; + destinationDefinitionId: string; + connectionConfiguration: ConnectionConfiguration; +} diff --git a/airbyte-webapp/src/core/domain/workspace/Workspace.ts b/airbyte-webapp/src/core/domain/workspace/Workspace.ts new file mode 100644 index 000000000000..75b962e71e0e --- /dev/null +++ b/airbyte-webapp/src/core/domain/workspace/Workspace.ts @@ -0,0 +1,22 @@ +export interface Notification { + notificationType: string; + sendOnSuccess: boolean; + sendOnFailure: boolean; + slackConfiguration: { + webhook: string; + }; +} + +export interface Workspace { + workspaceId: string; + customerId: string; + name: string; + email: string; + slug: string; + initialSetupComplete: boolean; + anonymousDataCollection: boolean; + news: boolean; + securityUpdates: boolean; + displaySetupWizard: boolean; + notifications: Notification[]; +} diff --git a/airbyte-webapp/src/core/resources/Connection.ts b/airbyte-webapp/src/core/resources/Connection.ts index 0081f58bf554..bebc4926d606 100644 --- a/airbyte-webapp/src/core/resources/Connection.ts +++ b/airbyte-webapp/src/core/resources/Connection.ts @@ -9,8 +9,6 @@ import { import { SyncSchema } from "core/domain/catalog"; import { CommonRequestError } from "core/request/CommonRequestError"; -import { Source } from "./Source"; -import { Destination } from "./Destination"; import BaseResource from "./BaseResource"; import { @@ -19,6 +17,7 @@ import { ScheduleProperties, Operation, } from "core/domain/connection"; +import { Destination, Source } from "core/domain/connector"; export type { Connection, ScheduleProperties }; diff --git a/airbyte-webapp/src/core/resources/Destination.tsx b/airbyte-webapp/src/core/resources/Destination.tsx index 0041cc65b7c7..fa7c1814ffa7 100644 --- a/airbyte-webapp/src/core/resources/Destination.tsx +++ b/airbyte-webapp/src/core/resources/Destination.tsx @@ -2,15 +2,7 @@ import { MutateShape, ReadShape, Resource, SchemaDetail } from "rest-hooks"; import { ConnectionConfiguration } from "core/domain/connection"; import BaseResource from "./BaseResource"; - -export interface Destination { - destinationId: string; - name: string; - destinationName: string; - workspaceId: string; - destinationDefinitionId: string; - connectionConfiguration: ConnectionConfiguration; -} +import { Destination } from "core/domain/connector"; export class DestinationResource extends BaseResource implements Destination { readonly destinationId: string = ""; diff --git a/airbyte-webapp/src/core/resources/DestinationDefinition.ts b/airbyte-webapp/src/core/resources/DestinationDefinition.ts index 834e54e87e9b..80d688fb97c4 100644 --- a/airbyte-webapp/src/core/resources/DestinationDefinition.ts +++ b/airbyte-webapp/src/core/resources/DestinationDefinition.ts @@ -4,16 +4,7 @@ import { getService } from "core/servicesProvider"; import BaseResource from "./BaseResource"; import { DestinationDefinitionService } from "core/domain/connector/DestinationDefinitionService"; - -export interface DestinationDefinition { - destinationDefinitionId: string; - name: string; - dockerRepository: string; - dockerImageTag: string; - latestDockerImageTag: string; - documentationUrl: string; - icon: string; -} +import { DestinationDefinition } from 
"../domain/connector"; export default class DestinationDefinitionResource extends BaseResource diff --git a/airbyte-webapp/src/core/resources/Source.tsx b/airbyte-webapp/src/core/resources/Source.tsx index 4af4b0b1e74e..c9744ceb0080 100644 --- a/airbyte-webapp/src/core/resources/Source.tsx +++ b/airbyte-webapp/src/core/resources/Source.tsx @@ -1,15 +1,7 @@ import { MutateShape, ReadShape, Resource, SchemaDetail } from "rest-hooks"; import BaseResource from "./BaseResource"; import { ConnectionConfiguration } from "core/domain/connection"; - -export interface Source { - sourceId: string; - name: string; - sourceName: string; - workspaceId: string; - sourceDefinitionId: string; - connectionConfiguration: ConnectionConfiguration; -} +import { Source } from "core/domain/connector"; export class SourceResource extends BaseResource implements Source { readonly sourceId: string = ""; diff --git a/airbyte-webapp/src/core/resources/SourceDefinition.ts b/airbyte-webapp/src/core/resources/SourceDefinition.ts index f5e785f73abe..4c71a70b4bd6 100644 --- a/airbyte-webapp/src/core/resources/SourceDefinition.ts +++ b/airbyte-webapp/src/core/resources/SourceDefinition.ts @@ -1,17 +1,9 @@ import { MutateShape, ReadShape, Resource, SchemaDetail } from "rest-hooks"; + import BaseResource from "./BaseResource"; import { getService } from "core/servicesProvider"; -import { SourceDefinitionService } from "../domain/connector/SourceDefinitionService"; - -export interface SourceDefinition { - sourceDefinitionId: string; - name: string; - dockerRepository: string; - dockerImageTag: string; - latestDockerImageTag: string; - documentationUrl: string; - icon: string; -} +import { SourceDefinitionService } from "core/domain/connector/SourceDefinitionService"; +import { SourceDefinition } from "core/domain/connector"; export default class SourceDefinitionResource extends BaseResource diff --git a/airbyte-webapp/src/core/resources/Workspace.ts b/airbyte-webapp/src/core/resources/Workspace.ts index ee7aba19b18e..e0d771eaa542 100644 --- a/airbyte-webapp/src/core/resources/Workspace.ts +++ b/airbyte-webapp/src/core/resources/Workspace.ts @@ -1,28 +1,7 @@ import { MutateShape, ReadShape, Resource, SchemaDetail } from "rest-hooks"; -import BaseResource from "./BaseResource"; - -export interface Notification { - notificationType: string; - sendOnSuccess: boolean; - sendOnFailure: boolean; - slackConfiguration: { - webhook: string; - }; -} -export interface Workspace { - workspaceId: string; - customerId: string; - name: string; - email: string; - slug: string; - initialSetupComplete: boolean; - anonymousDataCollection: boolean; - news: boolean; - securityUpdates: boolean; - displaySetupWizard: boolean; - notifications: Notification[]; -} +import BaseResource from "./BaseResource"; +import { Notification, Workspace } from "core/domain/workspace/Workspace"; export default class WorkspaceResource extends BaseResource @@ -63,6 +42,17 @@ export default class WorkspaceResource }; } + static getBySlug( + this: T + ): ReadShape> { + return { + ...super.detailShape(), + schema: this, + fetch: async (body: Readonly<{ slug: string }>): Promise => + await this.fetch("post", `${this.url(body)}/get_by_slug`, body), + }; + } + static updateShape( this: T ): MutateShape> { diff --git a/airbyte-webapp/src/hooks/services/Analytics/TrackPageAnalytics.tsx b/airbyte-webapp/src/hooks/services/Analytics/TrackPageAnalytics.tsx index 4d13737175d8..1e20d5a993b8 100644 --- a/airbyte-webapp/src/hooks/services/Analytics/TrackPageAnalytics.tsx +++ 
b/airbyte-webapp/src/hooks/services/Analytics/TrackPageAnalytics.tsx @@ -9,7 +9,8 @@ export const TrackPageAnalytics: React.FC = () => { const { pathname } = useRouter(); const analyticsService = useAnalyticsService(); useEffect(() => { - const pageName = getPageName(pathname); + const pathWithoutWorkspaceId = pathname.split("/").splice(2).join("."); + const pageName = getPageName(pathWithoutWorkspaceId); if (pageName) { analyticsService.page(pageName); } diff --git a/airbyte-webapp/src/hooks/services/Analytics/pageNameUtils.tsx b/airbyte-webapp/src/hooks/services/Analytics/pageNameUtils.tsx index cb8934d7d269..b108435e9e77 100644 --- a/airbyte-webapp/src/hooks/services/Analytics/pageNameUtils.tsx +++ b/airbyte-webapp/src/hooks/services/Analytics/pageNameUtils.tsx @@ -1,27 +1,28 @@ -import { Routes } from "pages/routes"; +import { RoutePaths } from "pages/routes"; +import { SettingsRoute } from "pages/SettingsPage/SettingsPage"; const getPageName = (pathname: string): string => { - const itemSourcePageRegex = new RegExp(`${Routes.Source}/.*`); - const itemDestinationPageRegex = new RegExp(`${Routes.Destination}/.*`); + const itemSourcePageRegex = new RegExp(`${RoutePaths.Source}/.*`); + const itemDestinationPageRegex = new RegExp(`${RoutePaths.Destination}/.*`); const itemSourceToDestinationPageRegex = new RegExp( - `(${Routes.Source}|${Routes.Destination})${Routes.Connection}/.*` + `(${RoutePaths.Source}|${RoutePaths.Destination})${RoutePaths.Connection}/.*` ); - if (pathname === Routes.Destination) { + if (pathname === RoutePaths.Destination) { return "Destinations Page"; } - if (pathname === Routes.Root) { + if (pathname === RoutePaths.Source) { return "Sources Page"; } - if (pathname === `${Routes.Source}${Routes.SourceNew}`) { + if (pathname === `${RoutePaths.Source}/${RoutePaths.SourceNew}`) { return "Create Source Page"; } - if (pathname === `${Routes.Destination}${Routes.DestinationNew}`) { + if (pathname === `${RoutePaths.Destination}/${RoutePaths.DestinationNew}`) { return "Create Destination Page"; } if ( - pathname === `${Routes.Source}${Routes.ConnectionNew}` || - pathname === `${Routes.Destination}${Routes.ConnectionNew}` + pathname === `${RoutePaths.Source}/${RoutePaths.ConnectionNew}` || + pathname === `${RoutePaths.Destination}/${RoutePaths.ConnectionNew}` ) { return "Create Connection Page"; } @@ -34,22 +35,22 @@ const getPageName = (pathname: string): string => { if (pathname.match(itemSourcePageRegex)) { return "Source Item Page"; } - if (pathname === `${Routes.Settings}${Routes.Source}`) { + if (pathname === `${RoutePaths.Settings}/${SettingsRoute.Source}`) { return "Settings Sources Connectors Page"; } - if (pathname === `${Routes.Settings}${Routes.Destination}`) { + if (pathname === `${RoutePaths.Settings}/${SettingsRoute.Destination}`) { return "Settings Destinations Connectors Page"; } - if (pathname === `${Routes.Settings}${Routes.Configuration}`) { + if (pathname === `${RoutePaths.Settings}/${SettingsRoute.Configuration}`) { return "Settings Configuration Page"; } - if (pathname === `${Routes.Settings}${Routes.Notifications}`) { + if (pathname === `${RoutePaths.Settings}/${SettingsRoute.Notifications}`) { return "Settings Notifications Page"; } - if (pathname === `${Routes.Settings}${Routes.Metrics}`) { + if (pathname === `${RoutePaths.Settings}/${SettingsRoute.Metrics}`) { return "Settings Metrics Page"; } - if (pathname === Routes.Connections) { + if (pathname === RoutePaths.Connections) { return "Connections Page"; } diff --git 
a/airbyte-webapp/src/hooks/services/Analytics/useAnalyticsService.tsx b/airbyte-webapp/src/hooks/services/Analytics/useAnalyticsService.tsx index b3a377f32267..8d71e941b26b 100644 --- a/airbyte-webapp/src/hooks/services/Analytics/useAnalyticsService.tsx +++ b/airbyte-webapp/src/hooks/services/Analytics/useAnalyticsService.tsx @@ -55,9 +55,7 @@ function AnalyticsServiceProvider({ } export const useAnalyticsService = (): AnalyticsService => { - const analyticsService = useAnalytics(); - - return analyticsService.service; + return useAnalytics().service; }; export const useAnalytics = (): AnalyticsServiceProviderValue => { @@ -82,6 +80,14 @@ export const useAnalyticsIdentifyUser = (userId?: string): void => { }, [userId]); }; +export const useTrackPage = (page: string): void => { + const analyticsService = useAnalyticsService(); + + useEffect(() => { + analyticsService.page(page); + }, [analyticsService, page]); +}; + export const useAnalyticsRegisterValues = ( props?: AnalyticsContext | null ): void => { diff --git a/airbyte-webapp/src/hooks/services/Onboarding/OnboardingService.tsx b/airbyte-webapp/src/hooks/services/Onboarding/OnboardingService.tsx index 951eef8fcf02..226a968c92ef 100644 --- a/airbyte-webapp/src/hooks/services/Onboarding/OnboardingService.tsx +++ b/airbyte-webapp/src/hooks/services/Onboarding/OnboardingService.tsx @@ -1,6 +1,7 @@ import React, { useContext, useMemo } from "react"; import { useLocalStorage } from "react-use"; -import useWorkspace from "hooks/services/useWorkspace"; + +import { useCurrentWorkspace } from "hooks/services/useWorkspace"; import casesConfig from "config/casesConfig.json"; type Context = { @@ -15,7 +16,7 @@ export const OnboardingServiceContext = React.createContext( ); export const OnboardingServiceProvider: React.FC = ({ children }) => { - const { workspace } = useWorkspace(); + const workspace = useCurrentWorkspace(); const [feedbackPassed, setFeedbackPassed] = useLocalStorage( `${workspace.workspaceId}/passFeedback`, false diff --git a/airbyte-webapp/src/hooks/services/useConnectionHook.tsx b/airbyte-webapp/src/hooks/services/useConnectionHook.tsx index f7e6bc68f169..bf157981da02 100644 --- a/airbyte-webapp/src/hooks/services/useConnectionHook.tsx +++ b/airbyte-webapp/src/hooks/services/useConnectionHook.tsx @@ -14,10 +14,7 @@ import ConnectionResource, { ScheduleProperties, } from "core/resources/Connection"; import { SyncSchema } from "core/domain/catalog"; -import { SourceDefinition } from "core/resources/SourceDefinition"; -import { Source } from "core/resources/Source"; -import { Routes } from "pages/routes"; -import { Destination } from "core/resources/Destination"; +import { RoutePaths } from "pages/routes"; import useWorkspace from "./useWorkspace"; import { Operation } from "core/domain/connection/operation"; import { useAnalyticsService } from "hooks/services/Analytics/useAnalyticsService"; @@ -26,6 +23,7 @@ import { useGetService } from "core/servicesProvider"; import { RequestMiddleware } from "core/request/RequestMiddleware"; import { equal } from "utils/objects"; +import { Destination, Source, SourceDefinition } from "core/domain/connector"; export type ValuesProps = { schedule: ScheduleProperties | null; @@ -198,7 +196,7 @@ const useConnection = (): { await updateConnectionsStore({ workspaceId: workspace.workspaceId }); - push(Routes.Connections); + push(RoutePaths.Connections); }; const updateConnection = async ({ diff --git a/airbyte-webapp/src/hooks/services/useConnectorAuth.tsx 
b/airbyte-webapp/src/hooks/services/useConnectorAuth.tsx index 1885fbef8ce4..1ff186901fc7 100644 --- a/airbyte-webapp/src/hooks/services/useConnectorAuth.tsx +++ b/airbyte-webapp/src/hooks/services/useConnectorAuth.tsx @@ -181,7 +181,7 @@ export function useRunOauthFlow( }; } -export function useResolveRedirect(): void { +export function useResolveNavigate(): void { const { query } = useRouter(); useEffectOnce(() => { diff --git a/airbyte-webapp/src/hooks/services/useDestinationDefinition.tsx b/airbyte-webapp/src/hooks/services/useDestinationDefinition.tsx index b1be1091b692..4c3a16e949be 100644 --- a/airbyte-webapp/src/hooks/services/useDestinationDefinition.tsx +++ b/airbyte-webapp/src/hooks/services/useDestinationDefinition.tsx @@ -1,9 +1,8 @@ import { useResource } from "rest-hooks"; -import DestinationDefinitionResource, { - DestinationDefinition, -} from "core/resources/DestinationDefinition"; +import DestinationDefinitionResource from "core/resources/DestinationDefinition"; import useWorkspace from "./useWorkspace"; +import { DestinationDefinition } from "core/domain/connector"; const useDestinationDefinitionList = (): { destinationDefinitions: DestinationDefinition[]; diff --git a/airbyte-webapp/src/hooks/services/useDestinationHook.tsx b/airbyte-webapp/src/hooks/services/useDestinationHook.tsx index 0836e5bd24b9..83b848545140 100644 --- a/airbyte-webapp/src/hooks/services/useDestinationHook.tsx +++ b/airbyte-webapp/src/hooks/services/useDestinationHook.tsx @@ -2,16 +2,19 @@ import { useCallback } from "react"; import { useFetcher, useResource } from "rest-hooks"; import { useStatefulResource } from "@rest-hooks/legacy"; -import DestinationResource, { Destination } from "core/resources/Destination"; +import DestinationResource from "core/resources/Destination"; import ConnectionResource, { Connection } from "core/resources/Connection"; -import { Routes } from "pages/routes"; +import { RoutePaths } from "pages/routes"; import useRouter from "../useRouter"; import DestinationDefinitionSpecificationResource from "core/resources/DestinationDefinitionSpecification"; import SchedulerResource, { Scheduler } from "core/resources/Scheduler"; import { ConnectionConfiguration } from "core/domain/connection"; import useWorkspace from "./useWorkspace"; import { useAnalyticsService } from "hooks/services/Analytics/useAnalyticsService"; -import { DestinationDefinitionSpecification } from "core/domain/connector"; +import { + Destination, + DestinationDefinitionSpecification, +} from "core/domain/connector"; type ValuesProps = { name: string; @@ -284,7 +287,7 @@ const useDestination = (): DestinationService => { updateConnectionsStore({ connectionId: item.connectionId }, undefined) ); - push(Routes.Destination); + push(RoutePaths.Destination); }; return { diff --git a/airbyte-webapp/src/hooks/services/useDocumentation.ts b/airbyte-webapp/src/hooks/services/useDocumentation.ts index 9e69b3c26bd2..8d0ad3f8a8b3 100644 --- a/airbyte-webapp/src/hooks/services/useDocumentation.ts +++ b/airbyte-webapp/src/hooks/services/useDocumentation.ts @@ -15,8 +15,14 @@ const useDocumentation = (documentationUrl: string): UseDocumentationResult => { const { integrationUrl } = useConfig(); const url = documentationUrl.replace(DOCS_URL, integrationUrl) + ".md"; - return useQuery(documentationKeys.text(documentationUrl), () => - fetchDocumentation(url) + return useQuery( + documentationKeys.text(documentationUrl), + () => fetchDocumentation(url), + { + enabled: !!documentationUrl, + refetchOnMount: false, + 
refetchOnWindowFocus: false, + } ); }; diff --git a/airbyte-webapp/src/hooks/services/useSourceDefinition.tsx b/airbyte-webapp/src/hooks/services/useSourceDefinition.tsx index 22c13f05f195..4369064b48e0 100644 --- a/airbyte-webapp/src/hooks/services/useSourceDefinition.tsx +++ b/airbyte-webapp/src/hooks/services/useSourceDefinition.tsx @@ -1,9 +1,8 @@ import { useResource } from "rest-hooks"; -import SourceDefinitionResource, { - SourceDefinition, -} from "core/resources/SourceDefinition"; +import SourceDefinitionResource from "core/resources/SourceDefinition"; import useWorkspace from "./useWorkspace"; +import { SourceDefinition } from "core/domain/connector"; const useSourceDefinitionList = (): { sourceDefinitions: SourceDefinition[]; diff --git a/airbyte-webapp/src/hooks/services/useSourceHook.tsx b/airbyte-webapp/src/hooks/services/useSourceHook.tsx index fbb920eb1a08..563d63b921f1 100644 --- a/airbyte-webapp/src/hooks/services/useSourceHook.tsx +++ b/airbyte-webapp/src/hooks/services/useSourceHook.tsx @@ -2,8 +2,8 @@ import { useCallback } from "react"; import { useFetcher, useResource } from "rest-hooks"; import { useStatefulResource } from "@rest-hooks/legacy"; -import SourceResource, { Source } from "core/resources/Source"; -import { Routes } from "pages/routes"; +import SourceResource from "core/resources/Source"; +import { RoutePaths } from "pages/routes"; import ConnectionResource, { Connection } from "core/resources/Connection"; import SourceDefinitionSpecificationResource from "core/resources/SourceDefinitionSpecification"; import SchedulerResource, { Scheduler } from "core/resources/Scheduler"; @@ -12,7 +12,7 @@ import useWorkspace from "./useWorkspace"; import useRouter from "hooks/useRouter"; import { useAnalyticsService } from "hooks/services/Analytics/useAnalyticsService"; -import { SourceDefinitionSpecification } from "core/domain/connector"; +import { Source, SourceDefinitionSpecification } from "core/domain/connector"; type ValuesProps = { name: string; @@ -233,7 +233,7 @@ const useSource = (): SourceService => { updateConnectionsStore({ connectionId: item.connectionId }, undefined) ); - push(Routes.Root); + push(RoutePaths.Source); }; return { diff --git a/airbyte-webapp/src/hooks/services/useWorkspace.tsx b/airbyte-webapp/src/hooks/services/useWorkspace.tsx index a85b071bcd92..2261c2ce7ed8 100644 --- a/airbyte-webapp/src/hooks/services/useWorkspace.tsx +++ b/airbyte-webapp/src/hooks/services/useWorkspace.tsx @@ -1,27 +1,14 @@ -import { useFetcher, useResource } from "rest-hooks"; +import { useFetcher } from "rest-hooks"; -import WorkspaceResource, { Workspace } from "core/resources/Workspace"; +import WorkspaceResource from "core/resources/Workspace"; import NotificationsResource, { Notifications, } from "core/resources/Notifications"; -import { useGetService } from "core/servicesProvider"; -import { useAnalyticsService } from "./Analytics"; -import { Source } from "core/resources/Source"; -import { Destination } from "core/resources/Destination"; -export const usePickFirstWorkspace = (): Workspace => { - const { workspaces } = useResource(WorkspaceResource.listShape(), {}); - - return workspaces[0]; -}; - -const useCurrentWorkspace = (): Workspace => { - const workspaceProviderService = useGetService<() => Workspace>( - "currentWorkspaceProvider" - ); - - return workspaceProviderService(); -}; +import { useAnalyticsService } from "hooks/services/Analytics"; +import { useCurrentWorkspace } from "services/workspaces/WorkspacesService"; +import { Destination, Source } 
from "core/domain/connector"; +import { Workspace } from "core/domain/workspace/Workspace"; export type WebhookPayload = { webhook: string; @@ -105,8 +92,8 @@ const useWorkspace = (): { anonymousDataCollection: boolean; news: boolean; securityUpdates: boolean; - }) => - await updateWorkspace( + }) => { + const result = await updateWorkspace( {}, { workspaceId: workspace.workspaceId, @@ -116,6 +103,16 @@ const useWorkspace = (): { } ); + analyticsService.track("Specified Preferences", { + email: data.email, + anonymized: data.anonymousDataCollection, + subscribed_newsletter: data.news, + subscribed_security: data.securityUpdates, + }); + + return result; + }; + const updatePreferences = async (data: { email?: string; anonymousDataCollection: boolean; diff --git a/airbyte-webapp/src/hooks/useRouter.tsx b/airbyte-webapp/src/hooks/useRouter.tsx index bfb28ada7f2d..1516c4b91d0c 100644 --- a/airbyte-webapp/src/hooks/useRouter.tsx +++ b/airbyte-webapp/src/hooks/useRouter.tsx @@ -1,31 +1,27 @@ import { useMemo } from "react"; import { - useHistory, useLocation, + useNavigate, useParams, - useRouteMatch, -} from "react-router"; -import { match } from "react-router-dom"; + Location, + To, + NavigateOptions, +} from "react-router-dom"; import queryString from "query-string"; -import { Location, LocationDescriptor, Path, History } from "history"; // eslint-disable-next-line @typescript-eslint/no-explicit-any -function useRouter(): { +function useRouter(): { query: T; + params: P; pathname: string; - location: Location; - push(path: Path, state?: History.UnknownFacade | null | undefined): void; - push(location: LocationDescriptor): void; - replace(path: Path, state?: History.UnknownFacade | null | undefined): void; - replace(location: LocationDescriptor): void; - history: History; - match: match; + location: Location; + push(path: To, state?: NavigateOptions): void; + replace(path: To, state?: NavigateOptions): void; } { - const params = useParams(); - const location = useLocation(); - const history = useHistory(); - const match = useRouteMatch(); + const params: any = useParams(); + const location = useLocation(); + const navigate = useNavigate(); const query = useMemo( () => ({ @@ -37,15 +33,14 @@ function useRouter(): { return useMemo(() => { return { - push: history.push, - replace: history.replace, + params, + push: navigate, + replace: (path, state) => navigate(path, { ...state, replace: true }), pathname: location.pathname, query, - match, location, - history, }; - }, [match, location, history, query]); + }, [navigate, location, query, params]); } export default useRouter; diff --git a/airbyte-webapp/src/locales/en.json b/airbyte-webapp/src/locales/en.json index 41a016b5f2a1..5b56ecb2afb7 100644 --- a/airbyte-webapp/src/locales/en.json +++ b/airbyte-webapp/src/locales/en.json @@ -429,5 +429,8 @@ "credits.totalUsage": "Total usage", "demo.message.title": "This Airbyte demo is read-only", - "demo.message.body": "You cannot add or edit any connectors. You will see error messages on purpose if you try to do so." + "demo.message.body": "You cannot add or edit any connectors. 
You will see error messages on purpose if you try to do so.", + + "errorView.startOver": "Continue using the app", + "errorView.notFound": "Resource not found" } diff --git a/airbyte-webapp/src/packages/cloud/App.tsx b/airbyte-webapp/src/packages/cloud/App.tsx index 0a8117e96e43..2a4b5d97e3c2 100644 --- a/airbyte-webapp/src/packages/cloud/App.tsx +++ b/airbyte-webapp/src/packages/cloud/App.tsx @@ -9,7 +9,7 @@ import cloudLocales from "packages/cloud/locales/en.json"; import GlobalStyle from "global-styles"; import { theme } from "packages/cloud/theme"; -import { Routing } from "packages/cloud/routes"; +import { Routing } from "packages/cloud/cloudRoutes"; import LoadingPage from "components/LoadingPage"; import ApiErrorBoundary from "components/ApiErrorBoundary"; import NotificationServiceProvider from "hooks/services/Notification"; @@ -35,7 +35,13 @@ const StyleProvider: React.FC = ({ children }) => ( ); -const queryClient = new QueryClient(); +const queryClient = new QueryClient({ + defaultOptions: { + queries: { + suspense: true, + }, + }, +}); const StoreProvider: React.FC = ({ children }) => ( @@ -43,6 +49,24 @@ const StoreProvider: React.FC = ({ children }) => ( ); +const Services: React.FC = ({ children }) => ( + + + + + + + + {children} + + + + + + + +); + const App: React.FC = () => { return ( @@ -50,23 +74,9 @@ const App: React.FC = () => { }> - - - - - - - - - - - - - - - - - + + + diff --git a/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx b/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx new file mode 100644 index 000000000000..89ccebf8732b --- /dev/null +++ b/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx @@ -0,0 +1,198 @@ +import React, { Suspense, useMemo } from "react"; +import { + BrowserRouter as Router, + Navigate, + Route, + Routes, +} from "react-router-dom"; + +import SourcesPage from "pages/SourcesPage"; +import DestinationPage from "pages/DestinationPage"; +import ConnectionPage from "pages/ConnectionPage"; + +import LoadingPage from "components/LoadingPage"; +import MainView from "packages/cloud/views/layout/MainView"; +import { WorkspacesPage } from "packages/cloud/views/workspaces"; +import { useApiHealthPoll } from "hooks/services/Health"; +import { Auth } from "packages/cloud/views/auth"; +import { useAuthService } from "packages/cloud/services/auth/AuthService"; +import { useIntercom } from "packages/cloud/services/thirdParty/intercom/useIntercom"; +import { + useCurrentWorkspace, + WorkspaceServiceProvider, +} from "services/workspaces/WorkspacesService"; +import OnboardingPage from "pages/OnboardingPage"; +import { CreditsPage } from "packages/cloud/views/credits"; +import { ConfirmEmailPage } from "./views/auth/ConfirmEmailPage"; +import { TrackPageAnalytics } from "hooks/services/Analytics/TrackPageAnalytics"; +import { CompleteOauthRequest } from "views/CompleteOauthRequest"; +import { OnboardingServiceProvider } from "hooks/services/Onboarding"; +import { useConfig } from "./services/config"; +import useFullStory from "./services/thirdParty/fullstory/useFullStory"; +import { + useAnalyticsIdentifyUser, + useAnalyticsRegisterValues, +} from "hooks/services/Analytics/useAnalyticsService"; +import { CloudSettingsPage } from "./views/settings/CloudSettingsPage"; +import { VerifyEmailAction } from "./views/FirebaseActionRoute"; +import { RoutePaths } from "pages/routes"; +import useRouter from "hooks/useRouter"; + +export const CloudRoutes = { + Root: "/", + AuthFlow: "/auth_flow", + + Metrics: "metrics", + SelectWorkspace: "workspaces", + Credits: 
"credits", + + // Auth routes + Signup: "/signup", + Login: "/login", + ResetPassword: "/reset-password", + ConfirmVerifyEmail: "/confirm-verify-email", + + // Firebase action routes + // These URLs come from Firebase emails, and all have the same + // action URL ("/verify-email") with different "mode" parameter + // TODO: use a better action URL in Firebase email template + FirebaseAction: "/verify-email", +} as const; + +const MainRoutes: React.FC = () => { + const workspace = useCurrentWorkspace(); + + const analyticsContext = useMemo( + () => ({ + workspace_id: workspace.workspaceId, + customer_id: workspace.customerId, + }), + [workspace] + ); + useAnalyticsRegisterValues(analyticsContext); + + const mainNavigate = workspace.displaySetupWizard + ? RoutePaths.Onboarding + : RoutePaths.Connections; + + return ( + + } + /> + } /> + } + /> + } + /> + } /> + + {workspace.displaySetupWizard && ( + + + + } + /> + )} + } /> + + ); +}; + +const MainViewRoutes = () => { + useApiHealthPoll(); + useIntercom(); + const { location } = useRouter(); + + return ( + + {[CloudRoutes.Login, CloudRoutes.Signup, CloudRoutes.FirebaseAction].map( + (r) => ( + + } + /> + ) + )} + } /> + } /> + + + + } + /> + } + /> + + ); +}; + +export const Routing: React.FC = () => { + const { user, inited, emailVerified } = useAuthService(); + const config = useConfig(); + useFullStory(config.fullstory, config.fullstory.enabled); + + const analyticsContext = useMemo( + () => + user + ? { + cloud_user_id: user.userId, + } + : null, + [user] + ); + useAnalyticsRegisterValues(analyticsContext); + useAnalyticsIdentifyUser(user?.userId); + + if (!inited) { + return ; + } + + return ( + + + + }> + {!user && } + {user && emailVerified && } + {user && !emailVerified && ( + + } + /> + } + /> + + } + /> + + )} + + + + ); +}; diff --git a/airbyte-webapp/src/packages/cloud/routes.tsx b/airbyte-webapp/src/packages/cloud/routes.tsx deleted file mode 100644 index 9f253cb69456..000000000000 --- a/airbyte-webapp/src/packages/cloud/routes.tsx +++ /dev/null @@ -1,206 +0,0 @@ -import React, { Suspense, useMemo } from "react"; -import { - BrowserRouter as Router, - Redirect, - Route, - Switch, -} from "react-router-dom"; - -import SourcesPage from "pages/SourcesPage"; -import DestinationPage from "pages/DestinationPage"; -import ConnectionPage from "pages/ConnectionPage"; - -import LoadingPage from "components/LoadingPage"; -import MainView from "packages/cloud/views/layout/MainView"; -import { WorkspacesPage } from "packages/cloud/views/workspaces"; -import { useApiHealthPoll } from "hooks/services/Health"; -import { Auth } from "packages/cloud/views/auth"; -import { useAuthService } from "packages/cloud/services/auth/AuthService"; -import { useIntercom } from "packages/cloud/services/thirdParty/intercom/useIntercom"; - -import { - useGetWorkspace, - useWorkspaceService, - WorkspaceServiceProvider, -} from "packages/cloud/services/workspaces/WorkspacesService"; -import OnboardingPage from "pages/OnboardingPage"; -import { CreditsPage } from "packages/cloud/views/credits"; -import { ConfirmEmailPage } from "./views/auth/ConfirmEmailPage"; -import { TrackPageAnalytics } from "hooks/services/Analytics/TrackPageAnalytics"; -import useWorkspace from "hooks/services/useWorkspace"; -import { CompleteOauthRequest } from "views/CompleteOauthRequest"; -import { OnboardingServiceProvider } from "hooks/services/Onboarding"; -import { useConfig } from "./services/config"; -import useFullStory from "./services/thirdParty/fullstory/useFullStory"; -import { 
- useAnalyticsIdentifyUser, - useAnalyticsRegisterValues, -} from "hooks/services/Analytics/useAnalyticsService"; -import { CloudSettingsPage } from "./views/CloudSettingsPage"; -import { VerifyEmailAction } from "./views/FirebaseActionRoute"; - -export enum Routes { - Preferences = "/preferences", - Onboarding = "/onboarding", - - Connections = "/connections", - Destination = "/destination", - Source = "/source", - Workspace = "/workspaces", - Connection = "/connection", - ConnectionNew = "/new-connection", - SourceNew = "/new-source", - DestinationNew = "/new-destination", - Settings = "/settings", - Metrics = "/metrics", - Account = "/account", - AuthFlow = "/auth_flow", - Root = "/", - SelectWorkspace = "/workspaces", - Configuration = "/configuration", - AccessManagement = "/access-management", - Notifications = "/notifications", - Credits = "/credits", - - // Auth routes - Signup = "/signup", - Login = "/login", - ResetPassword = "/reset-password", - ConfirmVerifyEmail = "/confirm-verify-email", - - // Firebase action routes - // These URLs come from Firebase emails, and all have the same - // action URL ("/verify-email") with different "mode" parameter - // TODO: use a better action URL in Firebase email template - FirebaseAction = "/verify-email", -} - -const MainRoutes: React.FC<{ currentWorkspaceId: string }> = ({ - currentWorkspaceId, -}) => { - useGetWorkspace(currentWorkspaceId); - const { workspace } = useWorkspace(); - - const analyticsContext = useMemo( - () => ({ - workspace_id: workspace.workspaceId, - customer_id: workspace.customerId, - }), - [workspace] - ); - useAnalyticsRegisterValues(analyticsContext); - - const mainRedirect = workspace.displaySetupWizard - ? Routes.Onboarding - : Routes.Connections; - - return ( - - - - - - - - - - - - - - - - - - - {workspace.displaySetupWizard && ( - - - - - - )} - - - ); -}; - -const MainViewRoutes = () => { - useApiHealthPoll(); - useIntercom(); - - const { currentWorkspaceId } = useWorkspaceService(); - - return ( - - - - - - {currentWorkspaceId ? ( - - }> - - - - ) : ( - - - - - - - )} - - - ); -}; - -export const Routing: React.FC = () => { - const { user, inited, emailVerified } = useAuthService(); - const config = useConfig(); - useFullStory(config.fullstory, config.fullstory.enabled); - - const analyticsContext = useMemo( - () => - user - ? { - cloud_user_id: user.userId, - } - : null, - [user] - ); - useAnalyticsRegisterValues(analyticsContext); - useAnalyticsIdentifyUser(user?.userId); - - return ( - - - }> - {inited ? 
( - <> - {user && emailVerified && ( - - - - )} - {user && !emailVerified && ( - - - - - - - - - - )} - {!user && } - - ) : ( - - )} - - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/services/AppServicesProvider.tsx b/airbyte-webapp/src/packages/cloud/services/AppServicesProvider.tsx index 9574921636fb..fccab52665c4 100644 --- a/airbyte-webapp/src/packages/cloud/services/AppServicesProvider.tsx +++ b/airbyte-webapp/src/packages/cloud/services/AppServicesProvider.tsx @@ -1,5 +1,4 @@ import React, { useMemo } from "react"; -import { useResource } from "rest-hooks"; import { useAuth } from "packages/firebaseReact"; @@ -8,30 +7,14 @@ import { useGetService, useInjectServices, } from "core/servicesProvider"; -import { useApiServices } from "core/defaultServices"; +import { ApiServices } from "core/ApiServices"; import { FirebaseSdkProvider } from "./FirebaseSdkProvider"; - -import { useWorkspaceService } from "./workspaces/WorkspacesService"; -import WorkspaceResource, { Workspace } from "core/resources/Workspace"; import { RequestAuthMiddleware } from "packages/cloud/lib/auth/RequestAuthMiddleware"; import { useConfig } from "./config"; import { UserService } from "packages/cloud/lib/domain/users"; import { RequestMiddleware } from "core/request/RequestMiddleware"; import { LoadingPage } from "components"; -const useCurrentWorkspaceProvider = (): Workspace => { - const { currentWorkspaceId } = useWorkspaceService(); - const workspace = useResource(WorkspaceResource.detailShape(), { - workspaceId: currentWorkspaceId || null, - }); - - return workspace; -}; - -const services = { - currentWorkspaceProvider: useCurrentWorkspaceProvider, -}; - /** * This Provider is main services entrypoint * It initializes all required services for app to work @@ -39,7 +22,7 @@ const services = { */ const AppServicesProvider: React.FC = ({ children }) => { return ( - + {children} @@ -72,11 +55,14 @@ const ServiceOverrides: React.FC = React.memo(({ children }) => { ); useInjectServices(inject); - useApiServices(); const registeredMiddlewares = useGetService("DefaultRequestMiddlewares"); - return registeredMiddlewares ? <>{children} : ; + return ( + + {registeredMiddlewares ? 
<>{children} : } + + ); }); export { AppServicesProvider }; diff --git a/airbyte-webapp/src/packages/cloud/services/auth/AuthService.tsx b/airbyte-webapp/src/packages/cloud/services/auth/AuthService.tsx index 110b1243989c..0ccb0c41e75c 100644 --- a/airbyte-webapp/src/packages/cloud/services/auth/AuthService.tsx +++ b/airbyte-webapp/src/packages/cloud/services/auth/AuthService.tsx @@ -1,5 +1,6 @@ import React, { useContext, useEffect, useMemo } from "react"; import { useQueryClient } from "react-query"; +import { useResetter } from "rest-hooks"; import { GoogleAuthService } from "packages/cloud/lib/auth/GoogleAuthService"; import useTypesafeReducer from "hooks/useTypesafeReducer"; @@ -79,8 +80,6 @@ export const AuthenticationProvider: React.FC = ({ children }) => { useEffect(() => { return auth.onAuthStateChanged(async (currentUser) => { if (state.currentUser === null && currentUser) { - // token = await currentUser.getIdToken(); - let user: User | undefined; try { @@ -109,6 +108,7 @@ export const AuthenticationProvider: React.FC = ({ children }) => { }, [state.currentUser, loggedIn, authInited]); const queryClient = useQueryClient(); + const reset = useResetter(); const ctx: AuthContextApi = useMemo( () => ({ @@ -122,6 +122,7 @@ export const AuthenticationProvider: React.FC = ({ children }) => { await authService.signOut(); loggedOut(); await queryClient.invalidateQueries(); + await reset(); }, async updateEmail(email, password): Promise { await userService.changeEmail(email); diff --git a/airbyte-webapp/src/packages/cloud/services/workspaces/WorkspacesService.tsx b/airbyte-webapp/src/packages/cloud/services/workspaces/WorkspacesService.tsx index ed9b79c256bd..b3091891eb40 100644 --- a/airbyte-webapp/src/packages/cloud/services/workspaces/WorkspacesService.tsx +++ b/airbyte-webapp/src/packages/cloud/services/workspaces/WorkspacesService.tsx @@ -1,67 +1,65 @@ -import React, { useContext, useMemo } from "react"; -import { useMutation, useQuery, useQueryClient } from "react-query"; -import { useLocalStorage } from "react-use"; -import { useResetter } from "rest-hooks"; +import { + QueryObserverResult, + useMutation, + useQuery, + useQueryClient, +} from "react-query"; + +import type { + CloudWorkspace, + CloudWorkspaceUsage, +} from "packages/cloud/lib/domain/cloudWorkspaces/types"; import { CloudWorkspacesService } from "packages/cloud/lib/domain/cloudWorkspaces/CloudWorkspacesService"; import { useCurrentUser } from "packages/cloud/services/auth/AuthService"; -import { useDefaultRequestMiddlewares } from "packages/cloud/services/useDefaultRequestMiddlewares"; -import { CloudWorkspace } from "packages/cloud/lib/domain/cloudWorkspaces/types"; import { useConfig } from "packages/cloud/services/config"; - -type Context = { - currentWorkspaceId?: string | null; - selectWorkspace: (workspaceId: string | null) => void; - createWorkspace: (name: string) => Promise; - updateWorkspace: { - mutateAsync: (payload: { - workspaceId: string; - name: string; - }) => Promise; - isLoading: boolean; - }; - removeWorkspace: { - mutateAsync: (workspaceId: string) => Promise; - isLoading: boolean; - }; +import { useDefaultRequestMiddlewares } from "packages/cloud/services/useDefaultRequestMiddlewares"; +import { useInitService } from "./useInitService"; +import { QueryObserverSuccessResult } from "react-query/types/core/types"; + +export const workspaceKeys = { + all: ["cloud_workspaces"] as const, + lists: () => [...workspaceKeys.all, "list"] as const, + list: (filters: string) => [...workspaceKeys.lists(), { 
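/*
 * A note on the AuthService hunk above: sign-out now clears both client-side
 * caches, react-query via queryClient.invalidateQueries() and the legacy
 * rest-hooks store via useResetter(), so no workspace data survives into the
 * next session. A minimal sketch of the combined handler, using only names
 * that appear in this patch:
 *
 *   const queryClient = useQueryClient();
 *   const reset = useResetter(); // from rest-hooks
 *   const signOut = async () => {
 *     await authService.signOut();
 *     loggedOut();
 *     await queryClient.invalidateQueries();
 *     await reset();
 *   };
 */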
filters }] as const, + details: () => [...workspaceKeys.all, "detail"] as const, + detail: (id: number | string) => [...workspaceKeys.details(), id] as const, + usage: (id: number | string) => + [...workspaceKeys.details(), id, "usage"] as const, }; -export const WorkspaceServiceContext = React.createContext( - null -); +function useGetWorkspaceService(): CloudWorkspacesService { + const { cloudApiUrl } = useConfig(); -function useGetWorkspaceService() { const requestAuthMiddleware = useDefaultRequestMiddlewares(); - const { cloudApiUrl } = useConfig(); - return useMemo( + return useInitService( () => new CloudWorkspacesService(cloudApiUrl, requestAuthMiddleware), - [requestAuthMiddleware, cloudApiUrl] + [cloudApiUrl, requestAuthMiddleware] ); } -export function useListWorkspaces() { +export function useListCloudWorkspaces(): CloudWorkspace[] { const service = useGetWorkspaceService(); const user = useCurrentUser(); - return useQuery("workspaces", () => service.listByUser(user.userId), { - suspense: true, - }); + return (useQuery(workspaceKeys.lists(), () => + service.listByUser(user.userId) + ) as QueryObserverSuccessResult).data; } export function useCreateWorkspace() { const service = useGetWorkspaceService(); const queryClient = useQueryClient(); + const user = useCurrentUser(); return useMutation( - async (payload: { name: string; userId: string }) => - service.create(payload), + async (name: string) => service.create({ name, userId: user.userId }), { onSuccess: (result) => { - queryClient.setQueryData("workspaces", (old) => [ - ...(old ?? []), - result, - ]); + queryClient.setQueryData( + workspaceKeys.lists(), + (old) => [...(old ?? []), result] + ); }, } ).mutateAsync; @@ -76,25 +74,28 @@ export function useUpdateWorkspace() { service.update(payload.workspaceId, { name: payload.name }), { onSuccess: (result) => { - queryClient.setQueryData("workspaces", (old) => { - const list = old ?? []; - if (list.length === 0) { - return [result]; - } + queryClient.setQueryData( + workspaceKeys.lists(), + (old) => { + const list = old ?? 
[]; + if (list.length === 0) { + return [result]; + } - const index = list.findIndex( - (item) => item.workspaceId === result.workspaceId - ); + const index = list.findIndex( + (item) => item.workspaceId === result.workspaceId + ); - if (index === -1) { - return list; - } + if (index === -1) { + return list; + } - return [...list.slice(0, index), result, ...list.slice(index + 1)]; - }); + return [...list.slice(0, index), result, ...list.slice(index + 1)]; + } + ); queryClient.setQueryData( - ["workspace", result.workspaceId], + [workspaceKeys.detail(result.workspaceId)], (old) => { return { ...old, @@ -116,7 +117,7 @@ export function useRemoveWorkspace() { { onSuccess: (_, workspaceId) => { queryClient.setQueryData( - "workspaces", + workspaceKeys.lists(), (old) => old?.filter((workspace) => workspace.workspaceId !== workspaceId) ); @@ -125,88 +126,22 @@ export function useRemoveWorkspace() { ); } -export function useGetWorkspace(workspaceId: string) { +export function useGetCloudWorkspace(workspaceId: string): CloudWorkspace { const service = useGetWorkspaceService(); - return useQuery( - ["workspace", workspaceId], - () => service.get(workspaceId), - { - suspense: true, - initialData: { - workspaceId: "", - name: "", - billingUserId: "", - remainingCredits: 0, - }, - } - ) as any; + return (useQuery([workspaceKeys.detail(workspaceId)], () => + service.get(workspaceId) + ) as QueryObserverSuccessResult).data; } -export function useGetUsage(workspaceId: string) { +export function useGetUsage( + workspaceId: string +): QueryObserverResult { const service = useGetWorkspaceService(); - return useQuery( - ["cloud_workspace", workspaceId, "usage"], - () => service.getUsage(workspaceId), - { - suspense: true, - } + return useQuery([workspaceKeys.usage(workspaceId)], () => + service.getUsage(workspaceId) ); } -export const WorkspaceServiceProvider: React.FC = ({ children }) => { - const user = useCurrentUser(); - const [currentWorkspaceId, setCurrentWorkspaceId] = useLocalStorage< - string | null - >(`${user.userId}/workspaceId`, null); - - const createWorkspace = useCreateWorkspace(); - const removeWorkspace = useRemoveWorkspace(); - const updateWorkspace = useUpdateWorkspace(); - - const queryClient = useQueryClient(); - const resetCache = useResetter(); - - const ctx = useMemo( - () => ({ - currentWorkspaceId, - createWorkspace: async (name: string) => - await createWorkspace({ - name, - userId: user.userId, - }), - removeWorkspace, - updateWorkspace, - selectWorkspace: async (workspaceId) => { - setCurrentWorkspaceId(workspaceId); - await queryClient.resetQueries(); - resetCache(); - }, - }), - [ - currentWorkspaceId, - user, - createWorkspace, - removeWorkspace, - updateWorkspace, - ] - ); - - return ( - - {children} - - ); -}; - -export const useWorkspaceService = (): Context => { - const workspaceService = useContext(WorkspaceServiceContext); - if (!workspaceService) { - throw new Error( - "useWorkspaceService must be used within a WorkspaceServiceProvider." 
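/*
 * The workspaceKeys factory above gives every cloud-workspace query a
 * hierarchical key, so related cache entries can be updated or invalidated by
 * prefix. A sketch of typical consumption, assuming react-query v3 (the
 * helper below is illustrative, not part of this patch):
 *
 *   const queryClient = useQueryClient();
 *   // Drops lists(), detail(id) and usage(id) entries in one call, since
 *   // they all share the ["cloud_workspaces"] prefix:
 *   const refreshWorkspaces = () =>
 *     queryClient.invalidateQueries(workspaceKeys.all);
 */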
- ); - } - - return workspaceService; -}; +export { useWorkspaceService } from "services/workspaces/WorkspacesService"; diff --git a/airbyte-webapp/src/packages/cloud/services/workspaces/useInitService.tsx b/airbyte-webapp/src/packages/cloud/services/workspaces/useInitService.tsx new file mode 100644 index 000000000000..327764fcc1d6 --- /dev/null +++ b/airbyte-webapp/src/packages/cloud/services/workspaces/useInitService.tsx @@ -0,0 +1,14 @@ +import { useEffect, useRef } from "react"; + +export function useInitService any>( + f: () => InstanceType, + deps: ConstructorParameters +): InstanceType { + const service = useRef>(f()); + + useEffect(() => { + service.current = f(); + }, deps); + + return service.current; +} diff --git a/airbyte-webapp/src/packages/cloud/views/auth/Auth.tsx b/airbyte-webapp/src/packages/cloud/views/auth/Auth.tsx index 00f60d30be42..d72e02281434 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/Auth.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/Auth.tsx @@ -1,13 +1,13 @@ import React, { Suspense } from "react"; import styled from "styled-components"; -import { Redirect, Route, Switch } from "react-router-dom"; +import { Navigate, Route, Routes } from "react-router-dom"; import { LoadingPage } from "components"; import useRouter from "hooks/useRouter"; import FormContent from "./components/FormContent"; import News from "./components/News"; -import { Routes } from "packages/cloud/routes"; +import { CloudRoutes } from "packages/cloud/cloudRoutes"; import { LoginPage } from "./LoginPage"; import { SignupPage } from "./SignupPage"; @@ -38,39 +38,38 @@ const NewsPart = styled(Part)` `; const Auth: React.FC = () => { - const { pathname } = useRouter(); + const { pathname, location } = useRouter(); return ( - - - - - - }> - - - - - - - - - - - - - - - - - - - - - - - - + + + + }> + + } /> + } /> + } + /> + } + /> + + } + /> + + + + + + + + ); }; diff --git a/airbyte-webapp/src/packages/cloud/views/auth/ConfirmPasswordResetPage/ConfirmPasswordResetPage.tsx b/airbyte-webapp/src/packages/cloud/views/auth/ConfirmPasswordResetPage/ConfirmPasswordResetPage.tsx index 452ea601c2b9..4c80a39b5241 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/ConfirmPasswordResetPage/ConfirmPasswordResetPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/ConfirmPasswordResetPage/ConfirmPasswordResetPage.tsx @@ -6,7 +6,7 @@ import * as yup from "yup"; import { LabeledInput, Link, LoadingButton } from "components"; import useRouterHook from "hooks/useRouter"; -import { Routes } from "packages/cloud/routes"; +import { CloudRoutes } from "packages/cloud/cloudRoutes"; import { useAuthService } from "packages/cloud/services/auth/AuthService"; import { FormTitle } from "../components/FormTitle"; @@ -43,7 +43,7 @@ const ResetPasswordConfirmPage: React.FC = () => { title: formatMessage({ id: "confirmResetPassword.success" }), isError: false, }); - push(Routes.Login); + push(CloudRoutes.Login); } catch (err) { // Error code reference: // https://firebase.google.com/docs/reference/js/v8/firebase.auth.Auth#confirmpasswordreset @@ -111,7 +111,7 @@ const ResetPasswordConfirmPage: React.FC = () => { - + diff --git a/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx b/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx index 98045be79928..87c8fa7f6c71 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx @@ -13,7 +13,8 @@ import { } from 
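/*
 * useInitService (added above in useInitService.tsx) hands back a stable
 * service instance across renders and re-instantiates only when the
 * constructor arguments change (unlike useMemo, which React may recompute at
 * will). Usage sketch with names from this patch:
 *
 *   const service = useInitService(
 *     () => new CloudWorkspacesService(cloudApiUrl, requestAuthMiddleware),
 *     [cloudApiUrl, requestAuthMiddleware]
 *   );
 */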
"packages/cloud/views/auth/components/FormComponents"; import { FormTitle } from "packages/cloud/views/auth/components/FormTitle"; import { FieldError } from "packages/cloud/lib/errors/FieldError"; -import { Routes } from "packages/cloud/routes"; +import { CloudRoutes } from "packages/cloud/cloudRoutes"; +import useRouter from "hooks/useRouter"; const LoginPageValidationSchema = yup.object().shape({ email: yup.string().email("form.email.error").required("form.empty.error"), @@ -23,6 +24,7 @@ const LoginPageValidationSchema = yup.object().shape({ const LoginPage: React.FC = () => { const formatMessage = useIntl().formatMessage; const { login } = useAuthService(); + const { location, replace } = useRouter(); return (

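The onSubmit change in the next hunk replays the URL an unauthenticated visitor was bounced away from. A minimal sketch of the full pattern under react-router v6; the RequireAuth guard is illustrative and not part of this patch:

import React from "react";
import { Navigate, useLocation } from "react-router-dom";

// Illustrative guard: unauthenticated visitors go to /login, and the page
// they wanted is remembered in location.state.from for login to replay.
const RequireAuth: React.FC<{ loggedIn: boolean; children: React.ReactElement }> = ({
  loggedIn,
  children,
}) => {
  const location = useLocation();
  return loggedIn ? (
    children
  ) : (
    <Navigate to="/login" state={{ from: location.pathname }} replace />
  );
};

// ...and after a successful login, mirroring the hunk below:
// replace(location.state?.from ?? "/");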
@@ -36,15 +38,17 @@ const LoginPage: React.FC = () => { password: "", }} validationSchema={LoginPageValidationSchema} - onSubmit={async (values, { setFieldError }) => - login(values).catch((err) => { - if (err instanceof FieldError) { - setFieldError(err.field, err.message); - } else { - setFieldError("password", err.message); - } - }) - } + onSubmit={async (values, { setFieldError }) => { + return login(values) + .then((_) => replace(location.state?.from ?? "/")) + .catch((err) => { + if (err instanceof FieldError) { + setFieldError(err.field, err.message); + } else { + setFieldError("password", err.message); + } + }); + }} validateOnBlur validateOnChange={false} > @@ -92,7 +96,7 @@ const LoginPage: React.FC = () => { <> - + diff --git a/airbyte-webapp/src/packages/cloud/views/auth/ResetPasswordPage/ResetPasswordPage.tsx b/airbyte-webapp/src/packages/cloud/views/auth/ResetPasswordPage/ResetPasswordPage.tsx index 0fdea14b3d68..c613ff84e210 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/ResetPasswordPage/ResetPasswordPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/ResetPasswordPage/ResetPasswordPage.tsx @@ -6,7 +6,7 @@ import { FormattedMessage, useIntl } from "react-intl"; import { BottomBlock, FieldItem, Form } from "../components/FormComponents"; import { LoadingButton, LabeledInput, Link } from "components"; import { FormTitle } from "../components/FormTitle"; -import { Routes } from "../../../routes"; +import { CloudRoutes } from "../../../cloudRoutes"; import { useAuthService } from "packages/cloud/services/auth/AuthService"; import { useNotificationService } from "hooks/services/Notification/NotificationService"; @@ -64,7 +64,7 @@ const ResetPasswordPage: React.FC = () => { - + diff --git a/airbyte-webapp/src/packages/cloud/views/auth/components/Header.tsx b/airbyte-webapp/src/packages/cloud/views/auth/components/Header.tsx index 2631a808fa89..727ea4cd3615 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/components/Header.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/components/Header.tsx @@ -1,12 +1,12 @@ import React from "react"; import styled from "styled-components"; import { Link } from "react-router-dom"; +import { FormattedMessage } from "react-intl"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { faArrowLeft } from "@fortawesome/free-solid-svg-icons"; import { Button } from "components"; -import { Routes } from "../../../routes"; -import { FormattedMessage } from "react-intl"; +import { CloudRoutes } from "../../../cloudRoutes"; const Links = styled.div` width: 100%; @@ -53,18 +53,16 @@ const Header: React.FC = ({ toLogin }) => { - {toLogin ? 
( - - ) : ( - - )} + - diff --git a/airbyte-webapp/src/packages/cloud/views/credits/CreditsPage/CreditsPage.tsx b/airbyte-webapp/src/packages/cloud/views/credits/CreditsPage/CreditsPage.tsx index f4a95b7e4909..5f68efdde7b2 100644 --- a/airbyte-webapp/src/packages/cloud/views/credits/CreditsPage/CreditsPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/credits/CreditsPage/CreditsPage.tsx @@ -1,14 +1,14 @@ import React, { Suspense } from "react"; import { FormattedMessage } from "react-intl"; import styled from "styled-components"; -import { Redirect, Route, Switch } from "react-router"; +import { Navigate, Route, Routes } from "react-router-dom"; import HeadTitle from "components/HeadTitle"; import MainPageWithScroll from "components/MainPageWithScroll"; import SideMenu from "components/SideMenu"; import LoadingPage from "components/LoadingPage"; import { CategoryItem } from "components/SideMenu/SideMenu"; -import { Routes } from "packages/cloud/routes"; +import { CloudRoutes } from "packages/cloud/cloudRoutes"; import useRouter from "hooks/useRouter"; import CreditsTitle from "./components/CreditsTitle"; import RemainingCredits from "./components/RemainingCredits"; @@ -38,7 +38,7 @@ const CreditsPage: React.FC = () => { { routes: [ { - path: `${Routes.Credits}`, + path: ``, name: , component: CreditsUsagePage, }, @@ -63,25 +63,30 @@ const CreditsPage: React.FC = () => { /> }> - + {menuItems.flatMap((menuItem) => - menuItem.routes.map((route) => ( + menuItem.routes.map(({ path, component: Component }) => ( } /> )) )} - } /> - + diff --git a/airbyte-webapp/src/packages/cloud/views/credits/CreditsPage/components/RemainingCredits.tsx b/airbyte-webapp/src/packages/cloud/views/credits/CreditsPage/components/RemainingCredits.tsx index 0ab8c5d37bc5..671134c5a992 100644 --- a/airbyte-webapp/src/packages/cloud/views/credits/CreditsPage/components/RemainingCredits.tsx +++ b/airbyte-webapp/src/packages/cloud/views/credits/CreditsPage/components/RemainingCredits.tsx @@ -4,7 +4,7 @@ import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { faStar } from "@fortawesome/free-regular-svg-icons"; import styled from "styled-components"; -import { useGetWorkspace } from "packages/cloud/services/workspaces/WorkspacesService"; +import { useGetCloudWorkspace } from "packages/cloud/services/workspaces/WorkspacesService"; import { useCurrentWorkspace } from "hooks/services/useWorkspace"; const Block = styled.div` @@ -28,9 +28,7 @@ const StarIcon = styled(FontAwesomeIcon)` const RemainingCredits: React.FC = () => { const currentWorkspace = useCurrentWorkspace(); - const { data: cloudWorkspace } = useGetWorkspace( - currentWorkspace.workspaceId - ); + const cloudWorkspace = useGetCloudWorkspace(currentWorkspace.workspaceId); return ( diff --git a/airbyte-webapp/src/packages/cloud/views/layout/MainView/ErrorBoundary.tsx b/airbyte-webapp/src/packages/cloud/views/layout/MainView/ErrorBoundary.tsx new file mode 100644 index 000000000000..1fdd8eb3f5da --- /dev/null +++ b/airbyte-webapp/src/packages/cloud/views/layout/MainView/ErrorBoundary.tsx @@ -0,0 +1,44 @@ +import React from "react"; +import { FormattedMessage } from "react-intl"; + +import { CommonRequestError } from "core/request/CommonRequestError"; + +type BoundaryState = { hasError: boolean; message?: React.ReactNode | null }; + +const initialState: BoundaryState = { + hasError: false, + message: null, +}; + +export class ErrorBoundary extends React.Component< + { errorComponent: React.ReactElement }, + BoundaryState +> { + static 
getDerivedStateFromError(error: CommonRequestError): BoundaryState { + if (error.message.startsWith("Insufficient permissions")) { + return { hasError: true, message: error.message }; + } else if (error.status === 422) { + return { + hasError: true, + message: , + }; + } else { + throw error; + } + } + + state = initialState; + + reset = (): void => { + this.setState(initialState); + }; + + render(): React.ReactNode { + return this.state.hasError + ? React.cloneElement(this.props.errorComponent, { + message: this.state.message, + onReset: this.reset, + }) + : this.props.children; + } +} diff --git a/airbyte-webapp/src/packages/cloud/views/layout/MainView/InsufficientPermissionsErrorBoundary.tsx b/airbyte-webapp/src/packages/cloud/views/layout/MainView/InsufficientPermissionsErrorBoundary.tsx new file mode 100644 index 000000000000..d47fd641e9f0 --- /dev/null +++ b/airbyte-webapp/src/packages/cloud/views/layout/MainView/InsufficientPermissionsErrorBoundary.tsx @@ -0,0 +1,38 @@ +import React from "react"; + +import { CommonRequestError } from "core/request/CommonRequestError"; + +type BoundaryState = { hasError: boolean; message?: React.ReactNode | null }; + +const initialState: BoundaryState = { + hasError: false, + message: null, +}; + +export class InsufficientPermissionsErrorBoundary extends React.Component< + { errorComponent: React.ReactElement }, + BoundaryState +> { + static getDerivedStateFromError(error: CommonRequestError): BoundaryState { + if (error.message.startsWith("Insufficient permissions")) { + return { hasError: true, message: error.message }; + } else { + throw error; + } + } + + state = initialState; + + reset = (): void => { + this.setState(initialState); + }; + + render(): React.ReactNode { + return this.state.hasError + ? React.cloneElement(this.props.errorComponent, { + message: this.state.message, + onReset: this.reset, + }) + : this.props.children; + } +} diff --git a/airbyte-webapp/src/packages/cloud/views/layout/MainView/MainView.tsx b/airbyte-webapp/src/packages/cloud/views/layout/MainView/MainView.tsx index 79b25cd58d36..3e8739a6ca3c 100644 --- a/airbyte-webapp/src/packages/cloud/views/layout/MainView/MainView.tsx +++ b/airbyte-webapp/src/packages/cloud/views/layout/MainView/MainView.tsx @@ -1,6 +1,13 @@ import React from "react"; import styled from "styled-components"; +import { Outlet } from "react-router-dom"; + +import { LoadingPage } from "components"; + import SideBar from "packages/cloud/views/layout/SideBar"; +import { ErrorBoundary } from "./ErrorBoundary"; +import { StartOverErrorView } from "views/common/StartOverErrorView"; +import { ResourceNotFoundErrorBoundary } from "views/common/ResorceNotFoundErrorBoundary"; const MainContainer = styled.div` width: 100%; @@ -17,10 +24,18 @@ const Content = styled.div` height: 100%; `; -const MainView: React.FC = (props) => ( +const MainView: React.FC = ({ children }) => ( - - {props.children} + }> + + + }> + + {children ?? 
} + + + + ); diff --git a/airbyte-webapp/src/packages/cloud/views/layout/SideBar/SideBar.tsx b/airbyte-webapp/src/packages/cloud/views/layout/SideBar/SideBar.tsx index 29f810a0fa01..82fe147a53f3 100644 --- a/airbyte-webapp/src/packages/cloud/views/layout/SideBar/SideBar.tsx +++ b/airbyte-webapp/src/packages/cloud/views/layout/SideBar/SideBar.tsx @@ -6,9 +6,9 @@ import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { faStar } from "@fortawesome/free-solid-svg-icons"; import { useIntercom } from "react-use-intercom"; -import { Routes } from "packages/cloud/routes"; +import { CloudRoutes } from "packages/cloud/cloudRoutes"; -import useWorkspace from "hooks/services/useWorkspace"; +import { useCurrentWorkspace } from "hooks/services/useWorkspace"; import { Link } from "components"; import { WorkspacePopout } from "packages/cloud/views/workspaces/WorkspacePopout"; @@ -19,12 +19,17 @@ import OnboardingIcon from "views/layout/SideBar/components/OnboardingIcon"; import ChatIcon from "views/layout/SideBar/components/ChatIcon"; import SettingsIcon from "views/layout/SideBar/components/SettingsIcon"; import SourceIcon from "views/layout/SideBar/components/SourceIcon"; -import { useGetWorkspace } from "packages/cloud/services/workspaces/WorkspacesService"; +import { useGetCloudWorkspace } from "packages/cloud/services/workspaces/WorkspacesService"; import { NotificationIndicator } from "views/layout/SideBar/NotificationIndicator"; import ResourcesPopup, { - Item, Icon, + Item, } from "views/layout/SideBar/components/ResourcesPopup"; +import { RoutePaths } from "pages/routes"; +import { + FeatureItem, + WithFeature, +} from "../../../../../hooks/services/Feature"; const CreditsIcon = styled(FontAwesomeIcon)` font-size: 21px; @@ -97,8 +102,8 @@ const WorkspaceButton = styled.div` `; const SideBar: React.FC = () => { - const { workspace } = useWorkspace(); - const { data: cloudWorkspace } = useGetWorkspace(workspace.workspaceId); + const workspace = useCurrentWorkspace(); + const cloudWorkspace = useGetCloudWorkspace(workspace.workspaceId); const { show } = useIntercom(); const handleChatUs = () => show(); @@ -108,8 +113,8 @@ const SideBar: React.FC = () => { logo @@ -122,7 +127,7 @@ const SideBar: React.FC = () => { {workspace.displaySetupWizard ? (
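/*
 * The MainView rewrite above makes it a react-router v6 layout route: behind
 * Suspense and two error boundaries it renders either explicit children or,
 * by default, an <Outlet />. A sketch of how such a layout is wired up (route
 * paths illustrative):
 *
 *   <Route element={<MainView />}>
 *     <Route path="connections/*" element={<ConnectionPage />} />
 *     <Route path="settings/*" element={<CloudSettingsPage />} />
 *   </Route>
 *
 * Each matched child renders exactly where MainView places its <Outlet />.
 */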
[SideBar.tsx hunks @@ -122,7 +127,7 @@ through @@ -196,16 +201,12 @@: each sidebar menu item swaps its Routes.* link target for the new RoutePaths.*/CloudRoutes.* constants (the onboarding item stays behind `workspace.displaySetupWizard ? ( ... ) : null`), and the settings item drops its explicit isActive check `location.pathname.startsWith(Routes.Settings)`; the JSX bodies of these hunks were lost in extraction]
diff --git a/airbyte-webapp/src/packages/cloud/views/CloudSettingsPage.tsx b/airbyte-webapp/src/packages/cloud/views/settings/CloudSettingsPage.tsx
similarity index 58%
rename from airbyte-webapp/src/packages/cloud/views/CloudSettingsPage.tsx
rename to airbyte-webapp/src/packages/cloud/views/settings/CloudSettingsPage.tsx
index 17a973d513b3..1ea5acf807fd 100644
--- a/airbyte-webapp/src/packages/cloud/views/CloudSettingsPage.tsx
+++ b/airbyte-webapp/src/packages/cloud/views/settings/CloudSettingsPage.tsx
@@ -1,8 +1,8 @@
 import React, { useMemo } from "react";
 import { FormattedMessage } from "react-intl";
-import useConnector from "hooks/services/useConnector";
-import { PageConfig } from "pages/SettingsPage/SettingsPage";
+// import useConnector from "hooks/services/useConnector";
+import { PageConfig, SettingsRoute } from "pages/SettingsPage/SettingsPage";
 import {
   DestinationsPage as SettingsDestinationPage,
   SourcesPage as SettingsSourcesPage,
@@ -10,13 +10,24 @@ import {
 import SettingsPage from "pages/SettingsPage";
 import ConfigurationsPage from "pages/SettingsPage/pages/ConfigurationsPage";
 import NotificationPage from "pages/SettingsPage/pages/NotificationPage";
-import { Routes } from "../routes";
-import { AccountSettingsView } from "./users/AccountSettingsView";
-import { WorkspaceSettingsView } from "./workspaces/WorkspaceSettingsView";
-import { UsersSettingsView } from "./users/UsersSettingsView";
+import { AccountSettingsView } from "packages/cloud/views/users/AccountSettingsView";
+import { WorkspaceSettingsView } from "packages/cloud/views/workspaces/WorkspaceSettingsView";
+import { UsersSettingsView } from "packages/cloud/views/users/UsersSettingsView";
+
+const CloudSettingsRoutes = {
+  Configuration: SettingsRoute.Configuration,
+  Notifications: SettingsRoute.Notifications,
+  Account: SettingsRoute.Account,
+  Source: SettingsRoute.Source,
+  Destination: SettingsRoute.Destination,
+
+  Workspace: "workspaces",
+  AccessManagement: "access-management",
+} as const;

 export const CloudSettingsPage: React.FC = () => {
-  const { countNewSourceVersion, countNewDestinationVersion } = useConnector();
+  // TODO: uncomment when supported in cloud
+  // const { countNewSourceVersion, countNewDestinationVersion } = useConnector();

   const pageConfig = useMemo(
     () => ({
       menuConfig: [
         {
           category: ,
           routes: [
             {
-              path: `${Routes.Settings}${Routes.Account}`,
+              path: CloudSettingsRoutes.Account,
               name: ,
               component: AccountSettingsView,
             },
@@ -35,34 +46,34 @@ export const CloudSettingsPage: React.FC = () => {
           category: ,
           routes: [
             {
-              path: `${Routes.Settings}${Routes.Workspace}`,
+              path: CloudSettingsRoutes.Workspace,
               name: ,
               component: WorkspaceSettingsView,
             },
             {
-              path: `${Routes.Settings}${Routes.Source}`,
+              path: CloudSettingsRoutes.Source,
               name: ,
-              indicatorCount: countNewSourceVersion,
+              // indicatorCount: countNewSourceVersion,
               component: SettingsSourcesPage,
             },
             {
-              path: `${Routes.Settings}${Routes.Destination}`,
+              path: CloudSettingsRoutes.Destination,
               name: ,
-              indicatorCount: countNewDestinationVersion,
+              // indicatorCount: countNewDestinationVersion,
               component: SettingsDestinationPage,
             },
             {
-              path: `${Routes.Settings}${Routes.Configuration}`,
+              path: CloudSettingsRoutes.Configuration,
               name: ,
               component: ConfigurationsPage,
             },
             {
-              path: `${Routes.Settings}${Routes.AccessManagement}`,
+              path: CloudSettingsRoutes.AccessManagement,
               name: ,
               component:
UsersSettingsView, }, { - path: `${Routes.Settings}${Routes.Notifications}`, + path: CloudSettingsRoutes.Notifications, name: , component: NotificationPage, }, @@ -70,7 +81,7 @@ export const CloudSettingsPage: React.FC = () => { }, ], }), - [countNewSourceVersion, countNewDestinationVersion] + [] ); return ; diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacePopout/WorkspacePopout.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacePopout/WorkspacePopout.tsx index 97b3c7280295..001943121918 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacePopout/WorkspacePopout.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacePopout/WorkspacePopout.tsx @@ -7,10 +7,10 @@ import { MenuListComponentProps } from "react-select/src/components/Menu"; import { Popout } from "components"; import { IDataItem } from "components/base/DropDown/components/Option"; import { - useGetWorkspace, - useListWorkspaces, + useListCloudWorkspaces, useWorkspaceService, } from "packages/cloud/services/workspaces/WorkspacesService"; +import { useCurrentWorkspace } from "services/workspaces/WorkspacesService"; import ExitIcon from "./components/ExitIcon"; @@ -79,7 +79,7 @@ const WorkspacesList: React.FC = ({ selectedWorkspace, ...props }) => { - const { selectWorkspace } = useWorkspaceService(); + const { exitWorkspace } = useWorkspaceService(); return ( @@ -88,7 +88,7 @@ const WorkspacesList: React.FC = ({ {children} - selectWorkspace("")}> + @@ -102,23 +102,25 @@ const WorkspacesList: React.FC = ({ const WorkspacePopout: React.FC<{ children: (props: { onOpen: () => void; value: any }) => React.ReactNode; }> = ({ children }) => { - const { data: workspaces } = useListWorkspaces(); - const { selectWorkspace, currentWorkspaceId } = useWorkspaceService(); - const { data: workspace } = useGetWorkspace(currentWorkspaceId || ""); - - const options = useMemo(() => { - return workspaces - ?.filter((w) => w.workspaceId !== workspace.workspaceId) - .map((workspace) => ({ - value: workspace.workspaceId, - label: workspace.name, - })); - }, [workspaces, workspace]); + const workspaceList = useListCloudWorkspaces(); + const { selectWorkspace } = useWorkspaceService(); + const workspace = useCurrentWorkspace(); + + const options = useMemo( + () => + workspaceList + .filter((w) => w.workspaceId !== workspace.workspaceId) + .map((workspace) => ({ + value: workspace.workspaceId, + label: workspace.name, + })), + [workspaceList, workspace] + ); return ( - children({ onOpen: targetProps.onOpen, value: workspace?.name }) + children({ onOpen: targetProps.onOpen, value: workspace.name }) } components={{ MenuList: (props) => ( @@ -127,7 +129,7 @@ const WorkspacePopout: React.FC<{ }} isSearchable={false} options={options} - value={workspace?.workspaceId} + value={workspace.slug} onChange={({ value }) => selectWorkspace(value)} /> ); diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/WorkspaceSettingsView.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/WorkspaceSettingsView.tsx index e14357aaff28..031f4683ccd8 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/WorkspaceSettingsView.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/WorkspaceSettingsView.tsx @@ -1,7 +1,7 @@ import React from "react"; import { FormattedMessage, useIntl } from "react-intl"; import styled from "styled-components"; -import { Form, Formik, Field, FieldProps } from 
"formik"; +import { Field, FieldProps, Form, Formik } from "formik"; import { Content, @@ -9,7 +9,8 @@ import { } from "pages/SettingsPage/pages/SettingsComponents"; import { Button, LabeledInput, LoadingButton } from "components"; import { - useGetWorkspace, + useRemoveWorkspace, + useUpdateWorkspace, useWorkspaceService, } from "packages/cloud/services/workspaces/WorkspacesService"; import { useCurrentWorkspace } from "hooks/services/useWorkspace"; @@ -35,106 +36,91 @@ const Buttons = styled.div` export const WorkspaceSettingsView: React.FC = () => { const formatMessage = useIntl().formatMessage; - const { - selectWorkspace, - removeWorkspace, - updateWorkspace, - } = useWorkspaceService(); - const currentWorkspace = useCurrentWorkspace(); - const { data: workspace, isLoading } = useGetWorkspace( - currentWorkspace.workspaceId - ); + const { exitWorkspace } = useWorkspaceService(); + const workspace = useCurrentWorkspace(); + const removeWorkspace = useRemoveWorkspace(); + const updateWorkspace = useUpdateWorkspace(); return ( <> - {!isLoading && workspace && workspace.name && workspace.workspaceId && ( - <> - - - - - } - > - - updateWorkspace.mutateAsync({ - workspaceId: workspace.workspaceId, - name: payload.name, - }) - } + + + + + } + > + + updateWorkspace.mutateAsync({ + workspaceId: workspace.workspaceId, + name: payload.name, + }) + } + > + {({ dirty, isSubmitting, resetForm, isValid }) => ( +
    + + + {({ field, meta }: FieldProps) => ( + + } + placeholder={formatMessage({ + id: "settings.generalSettings.form.name.placeholder", + })} + type="text" + error={!!meta.error && meta.touched} + message={ + meta.touched && + meta.error && + formatMessage({ id: meta.error }) + } + /> + )} + + + + + save changes + + + +
    + )} +
    +
    + + + removeWorkspace.mutateAsync(workspace.workspaceId)} > - {({ dirty, isSubmitting, resetForm, isValid }) => ( -
    - - - {({ field, meta }: FieldProps) => ( - - } - placeholder={formatMessage({ - id: - "settings.generalSettings.form.name.placeholder", - })} - type="text" - error={!!meta.error && meta.touched} - message={ - meta.touched && - meta.error && - formatMessage({ id: meta.error }) - } - /> - )} - - - - - save changes - - - -
    - )} -
    -
    - - - - removeWorkspace.mutateAsync(workspace.workspaceId) - } - > - - - - } - /> - - )} + + + + } + /> ); }; diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/components/WorkspaceItem.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/components/WorkspaceItem.tsx index ab712fe25a46..16c4d229e6b6 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/components/WorkspaceItem.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/components/WorkspaceItem.tsx @@ -20,13 +20,11 @@ const Arrow = styled(FontAwesomeIcon)` const WorkspaceItem: React.FC<{ onClick: (id: string) => void; id: string }> = ( props -) => { - return ( - props.onClick(props.id)}> -
   {props.children}
   [WorkspaceItem.tsx body hunk: the block body `) => { return ( ... ); };` collapses to a concise arrow `) => ( ... );`; the clickable wrapper and trailing arrow icon around {props.children} were lost in extraction]
+);

 export default WorkspaceItem;
diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/components/WorkspacesList.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/components/WorkspacesList.tsx
index 39dc8f06b3b8..1e5c45d09021 100644
--- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/components/WorkspacesList.tsx
+++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/components/WorkspacesList.tsx
@@ -4,7 +4,8 @@ import styled from "styled-components";
 import WorkspaceItem from "./WorkspaceItem";
 import WorkspacesControl from "./WorkspacesControl";
 import {
-  useListWorkspaces,
+  useCreateWorkspace,
+  useListCloudWorkspaces,
   useWorkspaceService,
 } from "packages/cloud/services/workspaces/WorkspacesService";
@@ -16,22 +17,21 @@ const Content = styled.div`
 `;

 const WorkspacesList: React.FC = () => {
-  const { data: workspaces } = useListWorkspaces();
-  const { selectWorkspace, createWorkspace } = useWorkspaceService();
+  const workspaces = useListCloudWorkspaces();
+  const { selectWorkspace } = useWorkspaceService();
+  const createWorkspace = useCreateWorkspace();

   return (
     [list JSX lost in extraction: the `workspaces?.length ? ... : null` guard is dropped, and the list now maps workspaces directly to {workspace.name} items]
   );
diff --git a/airbyte-webapp/src/pages/ConnectionPage/ConnectionPage.tsx b/airbyte-webapp/src/pages/ConnectionPage/ConnectionPage.tsx
index bdb7f0130cfd..2b80b28f72fb 100644
--- a/airbyte-webapp/src/pages/ConnectionPage/ConnectionPage.tsx
+++ b/airbyte-webapp/src/pages/ConnectionPage/ConnectionPage.tsx
@@ -1,56 +1,30 @@
 import React, { Suspense } from "react";
-import { Redirect, Route, Switch } from "react-router-dom";
+import { Navigate, Route, Routes } from "react-router-dom";
 import { NetworkErrorBoundary as ErrorBoundary } from "rest-hooks";
-import { Routes } from "../routes";
+import { RoutePaths } from "../routes";
 import LoadingPage from "components/LoadingPage";
 import ConnectionItemPage from "./pages/ConnectionItemPage";
 import CreationFormPage from "./pages/CreationFormPage";
-import useRouter from "hooks/useRouter";
 import AllConnectionsPage from "./pages/AllConnectionsPage";

-const FallbackRootRedirector = () => ;
+const FallbackRootNavigator = () => ;

 const ConnectionPage: React.FC = () => {
-  const { location } = useRouter();
-
   return (
     [route JSX partially lost in extraction: the v5 <Switch> over absolute Routes.* paths becomes a v6 <Routes> tree covering AllConnectionsPage, CreationFormPage and ConnectionItemPage mounted at path=":id/*" inside the NetworkErrorBoundary, with a trailing catch-all route rendering the fallback above]
   );
 };
diff --git a/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx b/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx
index fb3ff839fcb9..105491c70da8 100644
--- a/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx
+++ b/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx
@@ -5,7 +5,7 @@ import { useResource } from "rest-hooks";
 import { Button, MainPageWithScroll, PageTitle, LoadingPage } from "components";
 import ConnectionResource from "core/resources/Connection";
 import ConnectionsTable from "./components/ConnectionsTable";
-import { Routes } from "pages/routes";
+import { RoutePaths } from "pages/routes";
 import useRouter from "hooks/useRouter";
 import HeadTitle from "components/HeadTitle";
 import Placeholder, { ResourceTypes } from "components/Placeholder";
@@ -19,7 +19,7 @@ const AllConnectionsPage: React.FC = () => {
     workspaceId: workspace.workspaceId,
   });

-  const onClick =
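/*
 * A recurring pattern in the page diffs around here: navigation targets are
 * now resolved relative to the mounted route instead of absolute Routes.*
 * constants. Assuming the app's useRouter() delegates to v6's useNavigate():
 *
 *   // mounted under /workspaces/<id>/connections
 *   push(`${RoutePaths.ConnectionNew}`);  // -> .../connections/new-connection
 *   push(`../${RoutePaths.Connections}`); // -> up to the sibling route
 *   push("", { state });                  // -> same route, fresh state
 *
 * The resolution always depends on where the component is mounted.
 */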
() => push(`${Routes.Connections}${Routes.ConnectionNew}`); + const onClick = () => push(`${RoutePaths.ConnectionNew}`); return ( = ({ connections }) => { const { push } = useRouter(); - const { workspace } = useWorkspace(); const { changeStatus, syncManualConnection } = useSyncActions(); - const { sourceDefinitions } = useResource( - SourceDefinitionResource.listShape(), - { - workspaceId: workspace.workspaceId, - } - ); + const { sourceDefinitions } = useSourceDefinitionList(); - const { destinationDefinitions } = useResource( - DestinationDefinitionResource.listShape(), - { - workspaceId: workspace.workspaceId, - } - ); + const { destinationDefinitions } = useDestinationDefinitionList(); const data = getConnectionTableData( connections, @@ -67,8 +53,7 @@ const ConnectionsTable: React.FC = ({ connections }) => { [connections, syncManualConnection] ); - const clickRow = (source: ITableDataItem) => - push(`${Routes.Connections}/${source.connectionId}`); + const clickRow = (source: ITableDataItem) => push(`${source.connectionId}`); return ( = ({ - currentStep, -}) => { - const { query, push } = useRouter<{ id: string }>(); - const analyticsService = useAnalyticsService(); +const ConnectionItemPage: React.FC = () => { + const { params, push } = useRouter<{ id: string }>(); + const { id } = params; + const currentStep = params["*"] || "status"; const connection = useResource(ConnectionResource.detailShape(), { - connectionId: query.id, + connectionId: id, }); - const frequency = FrequencyConfig.find((item) => - equal(item.config, connection.schedule) - ); - const { source, destination } = connection; const sourceDefinition = useResource( @@ -69,14 +60,18 @@ const ConnectionItemPage: React.FC = ({ const onSelectStep = (id: string) => { if (id === "settings") { - push( - `${Routes.Connections}/${connection.connectionId}${Routes.Settings}` - ); + push(`${RoutePaths.Settings}`); } else { - push(`${Routes.Connections}/${connection.connectionId}`); + push(""); } }; + const analyticsService = useAnalyticsService(); + + const frequency = FrequencyConfig.find((item) => + equal(item.config, connection.schedule) + ); + const onAfterSaveSchema = () => { analyticsService.track("Source - Action", { action: "Edit schema", @@ -88,41 +83,6 @@ const ConnectionItemPage: React.FC = ({ }); }; - const renderStep = () => { - if (currentStep === "status") { - return ( - - ); - } - - return ( - - ); - }; - - const linkToSource = () => ( - - {source.name} - - ); - - const linkToDestination = () => ( - - {destination.name} - - ); - return ( = ({ + {source.name} + + ), + destination: ( + + {destination.name} + + ), }} /> } @@ -162,7 +136,34 @@ const ConnectionItemPage: React.FC = ({ /> } > - }>{renderStep()} + }> + + + } + /> + + } + /> + } /> + + ); }; diff --git a/airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/SettingsView.tsx b/airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/SettingsView.tsx index d65f57ea035d..f0476cfea2b4 100644 --- a/airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/SettingsView.tsx +++ b/airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/SettingsView.tsx @@ -15,13 +15,12 @@ import ConnectionForm from "views/Connection/ConnectionForm"; import ResetDataModal from "components/ResetDataModal"; import { ModalTypes } from "components/ResetDataModal/types"; import LoadingSchema from "components/LoadingSchema"; -import { DestinationDefinition } from "core/resources/DestinationDefinition"; -import { 
SourceDefinition } from "core/resources/SourceDefinition"; import { equal } from "utils/objects"; import EnabledControl from "./EnabledControl"; import { ConnectionNamespaceDefinition } from "core/domain/connection"; import { useAsyncFn } from "react-use"; +import { DestinationDefinition, SourceDefinition } from "core/domain/connector"; type IProps = { onAfterSaveSchema: () => void; diff --git a/airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/StatusMainInfo.tsx b/airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/StatusMainInfo.tsx index ba76676c4403..9d1c8f742923 100644 --- a/airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/StatusMainInfo.tsx +++ b/airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/StatusMainInfo.tsx @@ -7,8 +7,7 @@ import ImageBlock from "components/ImageBlock"; import { Header, Row, Cell } from "components/SimpleTableComponents"; import EnabledControl from "./EnabledControl"; import { Connection } from "core/resources/Connection"; -import { DestinationDefinition } from "core/resources/DestinationDefinition"; -import { SourceDefinition } from "core/resources/SourceDefinition"; +import { DestinationDefinition, SourceDefinition } from "core/domain/connector"; const MainInfo = styled(ContentCard)` margin-bottom: 14px; diff --git a/airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/StatusView.tsx b/airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/StatusView.tsx index 8b657dc792d3..5c002bdf8a63 100644 --- a/airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/StatusView.tsx +++ b/airbyte-webapp/src/pages/ConnectionPage/pages/ConnectionItemPage/components/StatusView.tsx @@ -15,9 +15,8 @@ import EmptyResource from "components/EmptyResourceBlock"; import ResetDataModal from "components/ResetDataModal"; import useConnection from "hooks/services/useConnectionHook"; import useLoadingState from "hooks/useLoadingState"; -import { DestinationDefinition } from "core/resources/DestinationDefinition"; -import { SourceDefinition } from "core/resources/SourceDefinition"; import { useAnalyticsService } from "hooks/services/Analytics/useAnalyticsService"; +import { DestinationDefinition, SourceDefinition } from "core/domain/connector"; type IProps = { connection: Connection; diff --git a/airbyte-webapp/src/pages/ConnectionPage/pages/CreationFormPage/CreationFormPage.tsx b/airbyte-webapp/src/pages/ConnectionPage/pages/CreationFormPage/CreationFormPage.tsx index 4b7905fc67f9..c7d98ce971ad 100644 --- a/airbyte-webapp/src/pages/ConnectionPage/pages/CreationFormPage/CreationFormPage.tsx +++ b/airbyte-webapp/src/pages/ConnectionPage/pages/CreationFormPage/CreationFormPage.tsx @@ -2,7 +2,7 @@ import React, { useState } from "react"; import { FormattedMessage } from "react-intl"; import { useResource } from "rest-hooks"; -import { Routes } from "pages/routes"; +import { RoutePaths } from "pages/routes"; import useRouter from "hooks/useRouter"; import MainPageWithScroll from "components/MainPageWithScroll"; import PageTitle from "components/PageTitle"; @@ -15,17 +15,19 @@ import ExistingEntityForm from "./components/ExistingEntityForm"; import SourceForm from "./components/SourceForm"; import DestinationForm from "./components/DestinationForm"; import CreateConnectionContent from "components/CreateConnectionContent"; -import SourceResource, { Source } from "core/resources/Source"; -import DestinationResource, { Destination 
} from "core/resources/Destination";
-import DestinationDefinitionResource, {
+import SourceResource from "core/resources/Source";
+import DestinationResource from "core/resources/Destination";
+import DestinationDefinitionResource from "core/resources/DestinationDefinition";
+import SourceDefinitionResource from "core/resources/SourceDefinition";
+import {
+  Destination,
   DestinationDefinition,
-} from "core/resources/DestinationDefinition";
-import SourceDefinitionResource, {
+  Source,
   SourceDefinition,
-} from "core/resources/SourceDefinition";
+} from "core/domain/connector";

 type IProps = {
-  type: "source" | "destination" | "connection";
+  type?: "source" | "destination" | "connection";
 };

 export enum StepsTypes {
@@ -86,8 +88,18 @@ function usePreloadData(): {
   return { source, sourceDefinition, destination, destinationDefinition };
 }

-const CreationFormPage: React.FC = ({ type }) => {
+const CreationFormPage: React.FC = () => {
   const { location, push } = useRouter();
+
+  const locationType = location.pathname.split("/")[2];
+
+  const type =
+    locationType === "connections"
+      ? "connection"
+      : locationType === "source"
+      ? "destination"
+      : "source";
+
   const hasConnectors =
     location.state?.sourceId && location.state?.destinationId;
   const [currentStep, setCurrentStep] = useState(
@@ -108,7 +120,7 @@ const CreationFormPage: React.FC = ({ type }) => {
   } = usePreloadData();

   const onSelectExistingSource = (id: string) => {
-    push({
+    push("", {
       state: {
         ...(location.state as Record),
         sourceId: id,
@@ -119,10 +131,10 @@ const CreationFormPage: React.FC = ({ type }) => {
   };

   const onSelectExistingDestination = (id: string) => {
-    push({
+    push("", {
       state: {
         ...(location.state as Record),
         destinationId: id,
       },
     });
     setCurrentEntityStep(EntityStepsTypes.CONNECTION);
@@ -179,13 +191,13 @@ const CreationFormPage: React.FC = ({ type }) => {
   const afterSubmitConnection = () => {
     switch (type) {
       case "destination":
-        push(`${Routes.Source}/${source?.sourceId}`);
+        push(`../${source?.sourceId}`);
         break;
       case "source":
-        push(`${Routes.Destination}/${destination?.destinationId}`);
+        push(`../${destination?.destinationId}`);
         break;
       default:
-        push(`${Routes.Connections}`);
+        push(`../${RoutePaths.Connections}`);
         break;
     }
   };
diff --git a/airbyte-webapp/src/pages/ConnectionPage/pages/CreationFormPage/components/DestinationForm.tsx b/airbyte-webapp/src/pages/ConnectionPage/pages/CreationFormPage/components/DestinationForm.tsx
index 063dce5d8fc0..ceffb6046d3e 100644
--- a/airbyte-webapp/src/pages/ConnectionPage/pages/CreationFormPage/components/DestinationForm.tsx
+++ b/airbyte-webapp/src/pages/ConnectionPage/pages/CreationFormPage/components/DestinationForm.tsx
@@ -46,12 +46,15 @@ const CreateDestinationPage: React.FC = ({ afterSubmit }) => {
       setSuccessRequest(true);
       setTimeout(() => {
         setSuccessRequest(false);
-        push({
-          state: {
-            ...(location.state as Record),
-            destinationId: result.destinationId,
-          },
-        });
+        push(
+          {},
+          {
+            state: {
+              ...(location.state as Record),
+              destinationId: result.destinationId,
+            },
+          }
+        );
         afterSubmit();
       }, 2000);
     } catch (e) {
diff --git a/airbyte-webapp/src/pages/ConnectionPage/pages/CreationFormPage/components/SourceForm.tsx b/airbyte-webapp/src/pages/ConnectionPage/pages/CreationFormPage/components/SourceForm.tsx
index 9fd9bccca964..3a5bc24a36fd 100644
--- a/airbyte-webapp/src/pages/ConnectionPage/pages/CreationFormPage/components/SourceForm.tsx
+++ b/airbyte-webapp/src/pages/ConnectionPage/pages/CreationFormPage/components/SourceForm.tsx
@@ -42,12 +42,15 @@ const SourceFormComponent: React.FC = ({ afterSubmit }) => {
       setSuccessRequest(true);
       setTimeout(() => {
         setSuccessRequest(false);
-        push({
-          state: {
-            ...(location.state as Record),
-            sourceId: result.sourceId,
-          },
-        });
+        push(
+          {},
+          {
+            state: {
+              ...(location.state as Record),
+              sourceId: result.sourceId,
+            },
+          }
+        );
         afterSubmit();
       }, 2000);
     } catch (e) {
diff --git a/airbyte-webapp/src/pages/DestinationPage/DestinationPage.tsx b/airbyte-webapp/src/pages/DestinationPage/DestinationPage.tsx
index c6c217b3ea32..d237c69b7f2c 100644
--- a/airbyte-webapp/src/pages/DestinationPage/DestinationPage.tsx
+++ b/airbyte-webapp/src/pages/DestinationPage/DestinationPage.tsx
@@ -1,42 +1,34 @@
-import React, { Suspense } from "react";
-import { Redirect, Route, Switch } from "react-router-dom";
+import React from "react";
+import { Navigate, Route, Routes } from "react-router-dom";
 import { NetworkErrorBoundary as ErrorBoundary } from "rest-hooks";
-import { Routes } from "../routes";
-import LoadingPage from "components/LoadingPage";
+import { RoutePaths } from "../routes";
 import AllDestinationsPage from "./pages/AllDestinationsPage";
 import DestinationItemPage from "./pages/DestinationItemPage";
 import CreateDestinationPage from "./pages/CreateDestinationPage";
-import ConnectionPage from "../ConnectionPage";
+import CreationFormPage from "../ConnectionPage/pages/CreationFormPage";

-const FallbackRootRedirector = () => ;
+const FallbackRootNavigator = () => ;

 const DestinationsPage: React.FC = () => {
   return (
     [route JSX lost in extraction: the v5 <Suspense>/<Switch> tree over absolute Routes.* paths becomes a v6 <Routes> tree covering AllDestinationsPage, CreateDestinationPage, CreationFormPage and DestinationItemPage, with a trailing catch-all route rendering the fallback above]
   );
 };
diff --git a/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx b/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx
index 2edc0667d889..f18f7e1fe588 100644
--- a/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx
+++
b/airbyte-webapp/src/pages/DestinationPage/pages/CreateDestinationPage/components/DestinationForm.tsx @@ -9,8 +9,8 @@ import { JobInfo } from "core/resources/Scheduler"; import { JobsLogItem } from "components/JobItem"; import { createFormErrorMessage } from "utils/errorStatusMessage"; import { ConnectionConfiguration } from "core/domain/connection"; -import { DestinationDefinition } from "core/resources/DestinationDefinition"; import { useAnalyticsService } from "hooks/services/Analytics/useAnalyticsService"; +import { DestinationDefinition } from "core/domain/connector"; type IProps = { onSubmit: (values: { diff --git a/airbyte-webapp/src/pages/DestinationPage/pages/DestinationItemPage/DestinationItemPage.tsx b/airbyte-webapp/src/pages/DestinationPage/pages/DestinationItemPage/DestinationItemPage.tsx index fc3d803cc9c2..5a5179fcacf0 100644 --- a/airbyte-webapp/src/pages/DestinationPage/pages/DestinationItemPage/DestinationItemPage.tsx +++ b/airbyte-webapp/src/pages/DestinationPage/pages/DestinationItemPage/DestinationItemPage.tsx @@ -6,7 +6,7 @@ import PageTitle from "components/PageTitle"; import useRouter from "hooks/useRouter"; import Placeholder, { ResourceTypes } from "components/Placeholder"; import ConnectionResource from "core/resources/Connection"; -import { Routes } from "../../../routes"; +import { RoutePaths } from "pages/routes"; import Breadcrumbs from "components/Breadcrumbs"; import DestinationConnectionTable from "./components/DestinationConnectionTable"; import DestinationResource from "core/resources/Destination"; @@ -28,7 +28,7 @@ import useWorkspace from "hooks/services/useWorkspace"; import { DropDownRow } from "components"; const DestinationItemPage: React.FC = () => { - const { query, push } = useRouter<{ id: string }>(); + const { params, push } = useRouter(); const { workspace } = useWorkspace(); const [currentStep, setCurrentStep] = useState(StepsTypes.OVERVIEW); const onSelectStep = (id: string) => setCurrentStep(id); @@ -45,7 +45,7 @@ const DestinationItemPage: React.FC = () => { ); const destination = useResource(DestinationResource.detailShape(), { - destinationId: query.id, + destinationId: params.id, }); const destinationDefinition = useResource( @@ -59,7 +59,7 @@ const DestinationItemPage: React.FC = () => { workspaceId: workspace.workspaceId, }); - const onClickBack = () => push(Routes.Destination); + const onClickBack = () => push(".."); const breadcrumbsData = [ { @@ -90,20 +90,16 @@ const DestinationItemPage: React.FC = () => { ); const onSelect = (data: DropDownRow.IDataItem) => { - if (data.value === "create-new-item") { - push({ - pathname: `${Routes.Destination}${Routes.ConnectionNew}`, - state: { destinationId: destination.destinationId }, - }); - } else { - push({ - pathname: `${Routes.Destination}${Routes.ConnectionNew}`, - state: { - sourceId: data.value, - destinationId: destination.destinationId, - }, - }); - } + const path = `../${RoutePaths.ConnectionNew}`; + const state = + data.value === "create-new-item" + ? 
{ destinationId: destination.destinationId } + : { + sourceId: data.value, + destinationId: destination.destinationId, + }; + + push(path, { state }); }; const renderContent = () => { diff --git a/airbyte-webapp/src/pages/DestinationPage/pages/DestinationItemPage/components/DestinationConnectionTable.tsx b/airbyte-webapp/src/pages/DestinationPage/pages/DestinationItemPage/components/DestinationConnectionTable.tsx index 926a2578e50a..d98e40a5be3f 100644 --- a/airbyte-webapp/src/pages/DestinationPage/pages/DestinationItemPage/components/DestinationConnectionTable.tsx +++ b/airbyte-webapp/src/pages/DestinationPage/pages/DestinationItemPage/components/DestinationConnectionTable.tsx @@ -2,7 +2,7 @@ import React, { useCallback } from "react"; import { useResource } from "rest-hooks"; import { ConnectionTable } from "components/EntityTable"; -import { Routes } from "pages/routes"; +import { RoutePaths } from "pages/routes"; import useRouter from "hooks/useRouter"; import { Connection } from "core/resources/Connection"; import useSyncActions from "components/EntityTable/hooks"; @@ -68,7 +68,7 @@ const DestinationConnectionTable: React.FC = ({ connections }) => { ); const clickRow = (source: ITableDataItem) => - push(`${Routes.Connections}/${source.connectionId}`); + push(`../../${RoutePaths.Connections}/${source.connectionId}`); return ( = ({ response: JobInfo; } | null>(null); - const destinationSpecification = useResource( - DestinationDefinitionSpecificationResource.detailShape(), - { - destinationDefinitionId: currentDestination.destinationDefinitionId, - } + const destinationSpecification = useDestinationDefinitionSpecificationLoadAsync( + currentDestination.destinationDefinitionId ); const destinationDefinition = useResource( diff --git a/airbyte-webapp/src/pages/OnboardingPage/OnboardingPage.tsx b/airbyte-webapp/src/pages/OnboardingPage/OnboardingPage.tsx index 84bbf03477b6..7f75e9c1107d 100644 --- a/airbyte-webapp/src/pages/OnboardingPage/OnboardingPage.tsx +++ b/airbyte-webapp/src/pages/OnboardingPage/OnboardingPage.tsx @@ -28,7 +28,7 @@ import StepsCounter from "./components/StepsCounter"; import LoadingPage from "components/LoadingPage"; import useWorkspace from "hooks/services/useWorkspace"; import useRouterHook from "hooks/useRouter"; -import { Routes } from "pages/routes"; +import { RoutePaths } from "pages/routes"; import { FormattedMessage } from "react-intl"; const Content = styled.div<{ big?: boolean; medium?: boolean }>` @@ -111,7 +111,7 @@ const OnboardingPage: React.FC = () => { const handleFinishOnboarding = () => { finishOnboarding(); - push(Routes.Connections); + push(RoutePaths.Connections); }; const renderStep = () => { diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/ConnectionStep.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/ConnectionStep.tsx index 57b6e4641474..c12d4bc6f818 100644 --- a/airbyte-webapp/src/pages/OnboardingPage/components/ConnectionStep.tsx +++ b/airbyte-webapp/src/pages/OnboardingPage/components/ConnectionStep.tsx @@ -1,11 +1,10 @@ import React from "react"; import CreateConnectionContent from "components/CreateConnectionContent"; -import { Source } from "core/resources/Source"; -import { Destination } from "core/resources/Destination"; import TitlesBlock from "./TitlesBlock"; import { FormattedMessage } from "react-intl"; import HighlightedText from "./HighlightedText"; +import { Destination, Source } from "core/domain/connector"; type IProps = { errorStatus?: number; diff --git 
a/airbyte-webapp/src/pages/OnboardingPage/components/DestinationStep.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/DestinationStep.tsx index 37548c2a479c..c7a583da7f80 100644 --- a/airbyte-webapp/src/pages/OnboardingPage/components/DestinationStep.tsx +++ b/airbyte-webapp/src/pages/OnboardingPage/components/DestinationStep.tsx @@ -9,11 +9,11 @@ import { useDestinationDefinitionSpecificationLoad } from "hooks/services/useDes import { createFormErrorMessage } from "utils/errorStatusMessage"; import { JobInfo } from "core/resources/Scheduler"; import { ConnectionConfiguration } from "core/domain/connection"; -import { DestinationDefinition } from "core/resources/DestinationDefinition"; import TitlesBlock from "./TitlesBlock"; import HighlightedText from "./HighlightedText"; import { useAnalyticsService } from "hooks/services/Analytics/useAnalyticsService"; +import { DestinationDefinition } from "core/domain/connector"; type IProps = { availableServices: DestinationDefinition[]; diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/ProgressBlock.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/ProgressBlock.tsx index d25bacf152c6..fdc1c3d39288 100644 --- a/airbyte-webapp/src/pages/OnboardingPage/components/ProgressBlock.tsx +++ b/airbyte-webapp/src/pages/OnboardingPage/components/ProgressBlock.tsx @@ -7,7 +7,7 @@ import { faChevronRight } from "@fortawesome/free-solid-svg-icons"; import { Connection } from "core/domain/connection"; import Link from "components/Link"; import { Button, H1 } from "components/base"; -import { Routes } from "pages/routes"; +import { RoutePaths } from "pages/routes"; import Status from "core/statuses"; const run = keyframes` @@ -104,17 +104,17 @@ const ProgressBlock: React.FC = ({ values={{ sr: (...sr: React.ReactNode[]) => ( <> - {sr}{" "} + {sr}{" "} ), ds: (...ds: React.ReactNode[]) => ( - + {ds} ), sync: (...sync: React.ReactNode[]) => ( - + {sync} ), diff --git a/airbyte-webapp/src/pages/OnboardingPage/components/SourceStep.tsx b/airbyte-webapp/src/pages/OnboardingPage/components/SourceStep.tsx index 2dd269eaf0fd..fa96ba13bfa1 100644 --- a/airbyte-webapp/src/pages/OnboardingPage/components/SourceStep.tsx +++ b/airbyte-webapp/src/pages/OnboardingPage/components/SourceStep.tsx @@ -3,7 +3,6 @@ import { FormattedMessage } from "react-intl"; import { ConnectionConfiguration } from "core/domain/connection"; import { JobInfo } from "core/resources/Scheduler"; -import { SourceDefinition } from "core/resources/SourceDefinition"; import ContentCard from "components/ContentCard"; import ServiceForm from "views/Connector/ServiceForm"; @@ -15,6 +14,7 @@ import { createFormErrorMessage } from "utils/errorStatusMessage"; import { useAnalyticsService } from "hooks/services/Analytics/useAnalyticsService"; import HighlightedText from "./HighlightedText"; import TitlesBlock from "./TitlesBlock"; +import { SourceDefinition } from "core/domain/connector"; type IProps = { onSubmit: (values: { diff --git a/airbyte-webapp/src/pages/PreferencesPage/PreferencesPage.tsx b/airbyte-webapp/src/pages/PreferencesPage/PreferencesPage.tsx index 395f56cc28a8..9f6990b772a9 100644 --- a/airbyte-webapp/src/pages/PreferencesPage/PreferencesPage.tsx +++ b/airbyte-webapp/src/pages/PreferencesPage/PreferencesPage.tsx @@ -1,12 +1,13 @@ -import React, { useEffect } from "react"; +import React from "react"; import { FormattedMessage } from "react-intl"; import styled from "styled-components"; import { PageViewContainer } from "components/CenteredPageComponents"; +import 
HeadTitle from "components/HeadTitle"; import { H1 } from "components"; import { PreferencesForm } from "views/Settings/PreferencesForm"; -import HeadTitle from "components/HeadTitle"; -import { useAnalyticsService } from "hooks/services/Analytics/useAnalyticsService"; + +import { useTrackPage } from "hooks/services/Analytics/useAnalyticsService"; import useWorkspace from "hooks/services/useWorkspace"; const Title = styled(H1)` @@ -14,36 +15,17 @@ const Title = styled(H1)` `; const PreferencesPage: React.FC = () => { - const analyticsService = useAnalyticsService(); - useEffect(() => analyticsService.page("Preferences Page"), [ - analyticsService, - ]); + useTrackPage("Preferences Page"); const { setInitialSetupConfig } = useWorkspace(); - const onSubmit = async (data: { - email: string; - anonymousDataCollection: boolean; - news: boolean; - securityUpdates: boolean; - }) => { - await setInitialSetupConfig(data); - - analyticsService.track("Specified Preferences", { - email: data.email, - anonymized: data.anonymousDataCollection, - subscribed_newsletter: data.news, - subscribed_security: data.securityUpdates, - }); - }; - return ( <FormattedMessage id={"preferences.title"} /> - + ); }; diff --git a/airbyte-webapp/src/pages/SettingsPage/SettingsPage.tsx b/airbyte-webapp/src/pages/SettingsPage/SettingsPage.tsx index 5ca6d0be209f..f627debe0807 100644 --- a/airbyte-webapp/src/pages/SettingsPage/SettingsPage.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/SettingsPage.tsx @@ -1,7 +1,7 @@ import React, { Suspense } from "react"; import { FormattedMessage } from "react-intl"; +import { Navigate, Route, Routes } from "react-router-dom"; import styled from "styled-components"; -import { Redirect, Route, Switch } from "react-router"; import useConnector from "hooks/services/useConnector"; import MainPageWithScroll from "components/MainPageWithScroll"; @@ -9,7 +9,7 @@ import PageTitle from "components/PageTitle"; import LoadingPage from "components/LoadingPage"; import HeadTitle from "components/HeadTitle"; import SideMenu from "components/SideMenu"; -import { Routes } from "pages/routes"; +import { RoutePaths } from "pages/routes"; import useRouter from "hooks/useRouter"; import NotificationPage from "./pages/NotificationPage"; import ConfigurationsPage from "./pages/ConfigurationsPage"; @@ -37,6 +37,15 @@ type SettingsPageProps = { pageConfig?: PageConfig; }; +export const SettingsRoute = { + Account: "account", + Destination: "destination", + Source: "source", + Configuration: "configuration", + Notifications: "notifications", + Metrics: "metrics", +} as const; + const SettingsPage: React.FC = ({ pageConfig }) => { const { push, pathname } = useRouter(); const { countNewSourceVersion, countNewDestinationVersion } = useConnector(); @@ -45,34 +54,34 @@ const SettingsPage: React.FC = ({ pageConfig }) => { { routes: [ { - path: `${Routes.Settings}${Routes.Account}`, + path: `${SettingsRoute.Account}`, name: , component: AccountPage, }, { - path: `${Routes.Settings}${Routes.Source}`, + path: `${SettingsRoute.Source}`, name: , indicatorCount: countNewSourceVersion, component: SourcesPage, }, { - path: `${Routes.Settings}${Routes.Destination}`, + path: `${SettingsRoute.Destination}`, name: , indicatorCount: countNewDestinationVersion, component: DestinationsPage, }, { - path: `${Routes.Settings}${Routes.Configuration}`, + path: `${SettingsRoute.Configuration}`, name: , component: ConfigurationsPage, }, { - path: `${Routes.Settings}${Routes.Notifications}`, + path: `${SettingsRoute.Notifications}`, name: 
, component: NotificationPage, }, { - path: `${Routes.Settings}${Routes.Metrics}`, + path: `${SettingsRoute.Metrics}`, name: , component: MetricsPage, }, @@ -99,25 +108,31 @@ const SettingsPage: React.FC = ({ pageConfig }) => { }> - + {menuItems.flatMap((menuItem) => - menuItem.routes.map((route) => ( + menuItem.routes.map(({ path, component: Component }) => ( } /> )) )} - } /> - + diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/DestinationsPage.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/DestinationsPage.tsx index 65622950c3c2..ef360b720bdf 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/DestinationsPage.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/DestinationsPage.tsx @@ -5,10 +5,10 @@ import { useAsyncFn } from "react-use"; import DestinationDefinitionResource from "core/resources/DestinationDefinition"; import { DestinationResource } from "core/resources/Destination"; -import { DestinationDefinition } from "core/resources/DestinationDefinition"; import useConnector from "hooks/services/useConnector"; import ConnectorsView from "./components/ConnectorsView"; import useWorkspace from "hooks/services/useWorkspace"; +import { DestinationDefinition } from "core/domain/connector"; const DestinationsPage: React.FC = () => { const { workspace } = useWorkspace(); diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/SourcesPage.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/SourcesPage.tsx index 4e8b9bedf8b0..36670029fab5 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/SourcesPage.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/SourcesPage.tsx @@ -3,13 +3,12 @@ import { useIntl } from "react-intl"; import { useFetcher } from "rest-hooks"; import { useAsyncFn } from "react-use"; -import SourceDefinitionResource, { - SourceDefinition, -} from "core/resources/SourceDefinition"; +import SourceDefinitionResource from "core/resources/SourceDefinition"; import useConnector from "hooks/services/useConnector"; import ConnectorsView from "./components/ConnectorsView"; import { useSourceDefinitionList } from "hooks/services/useSourceDefinition"; import { useSourceList } from "hooks/services/useSourceHook"; +import { SourceDefinition } from "core/domain/connector"; const SourcesPage: React.FC = () => { const [isUpdateSuccess, setIsUpdateSucces] = useState(false); diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/ConnectorsView.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/ConnectorsView.tsx index 8866fa0add93..2d61daff1df4 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/ConnectorsView.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/ConnectorsView.tsx @@ -7,12 +7,15 @@ import ConnectorCell from "./ConnectorCell"; import ImageCell from "./ImageCell"; import VersionCell from "./VersionCell"; import { Block, FormContentTitle, Title } from "./PageComponents"; -import { SourceDefinition } from "core/resources/SourceDefinition"; import UpgradeAllButton from "./UpgradeAllButton"; import CreateConnector from "./CreateConnector"; import HeadTitle from "components/HeadTitle"; -import { DestinationDefinition } from "core/resources/DestinationDefinition"; -import { Connector, ConnectorDefinition } from "core/domain/connector"; +import { + Connector, + ConnectorDefinition, + DestinationDefinition, + SourceDefinition, +} 
from "core/domain/connector"; import { FeatureItem, useFeatureService, diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/CreateConnector.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/CreateConnector.tsx index 1bc56e600999..d4cda9d0efa8 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/CreateConnector.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/CreateConnector.tsx @@ -5,7 +5,7 @@ import { useFetcher } from "rest-hooks"; import { Button } from "components"; import SourceDefinitionResource from "core/resources/SourceDefinition"; import useRouter from "hooks/useRouter"; -import { Routes } from "pages/routes"; +import { RoutePaths } from "pages/routes"; import DestinationDefinitionResource from "core/resources/DestinationDefinition"; import CreateConnectorModal from "./CreateConnectorModal"; @@ -57,10 +57,12 @@ const CreateConnector: React.FC = ({ type }) => { ], ]); - push({ - pathname: `${Routes.Source}${Routes.SourceNew}`, - state: { sourceDefinitionId: result.sourceDefinitionId }, - }); + push( + { + pathname: `${RoutePaths.Source}${RoutePaths.SourceNew}`, + }, + { state: { sourceDefinitionId: result.sourceDefinitionId } } + ); } catch (e) { setErrorMessage(e.message || formatMessage({ id: "form.dockerError" })); } @@ -92,10 +94,12 @@ const CreateConnector: React.FC = ({ type }) => { ] ); - push({ - pathname: `${Routes.Destination}${Routes.DestinationNew}`, - state: { destinationDefinitionId: result.destinationDefinitionId }, - }); + push( + { + pathname: `${RoutePaths.Destination}${RoutePaths.DestinationNew}`, + }, + { state: { destinationDefinitionId: result.destinationDefinitionId } } + ); } catch (e) { setErrorMessage(e.message || formatMessage({ id: "form.dockerError" })); } diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/VersionCell.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/VersionCell.tsx index 2901052caa80..8c1ac430f172 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/VersionCell.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/VersionCell.tsx @@ -5,7 +5,7 @@ import { FormattedMessage, useIntl } from "react-intl"; import { Input, LoadingButton } from "components"; import { FormContent } from "./PageComponents"; -import { Constants } from "constants/constants"; +import { DEV_IMAGE_TAG } from "core/domain/connector/constants"; type IProps = { version: string; @@ -87,7 +87,7 @@ const VersionCell: React.FC = ({ }; const isConnectorUpdateable = - currentVersion !== version || currentVersion === Constants.DEV_IMAGE_TAG; + currentVersion !== version || currentVersion === DEV_IMAGE_TAG; return ( diff --git a/airbyte-webapp/src/pages/SourcesPage/SourcesPage.tsx b/airbyte-webapp/src/pages/SourcesPage/SourcesPage.tsx index 55cee2fdcc69..fffbf93c5faa 100644 --- a/airbyte-webapp/src/pages/SourcesPage/SourcesPage.tsx +++ b/airbyte-webapp/src/pages/SourcesPage/SourcesPage.tsx @@ -1,43 +1,30 @@ -import React, { Suspense } from "react"; -import { Redirect, Route, Switch } from "react-router-dom"; +import React from "react"; +import { Navigate, Route, Routes } from "react-router-dom"; import { NetworkErrorBoundary as ErrorBoundary } from "rest-hooks"; -import { Routes } from "../routes"; -import LoadingPage from "components/LoadingPage"; -import ConnectionPage from "pages/ConnectionPage"; +import { RoutePaths } from "pages/routes"; import 
AllSourcesPage from "./pages/AllSourcesPage"; import CreateSourcePage from "./pages/CreateSourcePage"; import SourceItemPage from "./pages/SourceItemPage"; +import CreationFormPage from "pages/ConnectionPage/pages/CreationFormPage"; -const FallbackRootRedirector = () => ; +const FallbackRootNavigateor = () => ; -const SourcesPage: React.FC = () => { - return ( - }> - - - - - - - - - - - - - - - - - - - ); -}; +const SourcesPage: React.FC = () => ( + + } /> + } /> + + + + } + /> + } /> + } /> + +); export default SourcesPage; diff --git a/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx b/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx index 7933a4fd3db3..c2cfc6f7a306 100644 --- a/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx +++ b/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx @@ -3,7 +3,7 @@ import { FormattedMessage } from "react-intl"; import { useResource } from "rest-hooks"; import { Button, MainPageWithScroll } from "components"; -import { Routes } from "pages/routes"; +import { RoutePaths } from "pages/routes"; import PageTitle from "components/PageTitle"; import useRouter from "hooks/useRouter"; import SourcesTable from "./components/SourcesTable"; @@ -19,7 +19,7 @@ const AllSourcesPage: React.FC = () => { workspaceId: workspace.workspaceId, }); - const onCreateSource = () => push(`${Routes.Source}${Routes.SourceNew}`); + const onCreateSource = () => push(`${RoutePaths.SourceNew}`); return ( } diff --git a/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/components/SourcesTable.tsx b/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/components/SourcesTable.tsx index 7cc610326c5b..5467495e647c 100644 --- a/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/components/SourcesTable.tsx +++ b/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/components/SourcesTable.tsx @@ -3,12 +3,10 @@ import React from "react"; import { ImplementationTable } from "components/EntityTable"; import { getEntityTableData } from "components/EntityTable/utils"; import { EntityTableDataItem } from "components/EntityTable/types"; - -import { Routes } from "pages/routes"; import useRouter from "hooks/useRouter"; -import { Source } from "core/resources/Source"; import { useConnectionList } from "hooks/services/useConnectionHook"; import { useSourceDefinitionList } from "hooks/services/useSourceDefinition"; +import { Source } from "core/domain/connector"; type IProps = { sources: Source[]; @@ -27,8 +25,7 @@ const SourcesTable: React.FC = ({ sources }) => { "source" ); - const clickRow = (source: EntityTableDataItem) => - push(`${Routes.Source}/${source.entityId}`); + const clickRow = (source: EntityTableDataItem) => push(`${source.entityId}`); return ( diff --git a/airbyte-webapp/src/pages/SourcesPage/pages/CreateSourcePage/CreateSourcePage.tsx b/airbyte-webapp/src/pages/SourcesPage/pages/CreateSourcePage/CreateSourcePage.tsx index a678239d42a2..9a6159970c1f 100644 --- a/airbyte-webapp/src/pages/SourcesPage/pages/CreateSourcePage/CreateSourcePage.tsx +++ b/airbyte-webapp/src/pages/SourcesPage/pages/CreateSourcePage/CreateSourcePage.tsx @@ -4,7 +4,6 @@ import { useResource } from "rest-hooks"; import PageTitle from "components/PageTitle"; import SourceForm from "./components/SourceForm"; -import { Routes } from "../../../routes"; import useRouter from "hooks/useRouter"; import SourceDefinitionResource from "core/resources/SourceDefinition"; import useSource from 
"hooks/services/useSourceHook"; @@ -46,7 +45,7 @@ const CreateSourcePage: React.FC = () => { setSuccessRequest(true); setTimeout(() => { setSuccessRequest(false); - push(`${Routes.Source}/${result.sourceId}`); + push(`../${result.sourceId}`); }, 2000); } catch (e) { setErrorStatusRequest(e); diff --git a/airbyte-webapp/src/pages/SourcesPage/pages/CreateSourcePage/components/SourceForm.tsx b/airbyte-webapp/src/pages/SourcesPage/pages/CreateSourcePage/components/SourceForm.tsx index dba54c37f9e6..ed542d50fcdb 100644 --- a/airbyte-webapp/src/pages/SourcesPage/pages/CreateSourcePage/components/SourceForm.tsx +++ b/airbyte-webapp/src/pages/SourcesPage/pages/CreateSourcePage/components/SourceForm.tsx @@ -9,8 +9,8 @@ import { JobInfo } from "core/resources/Scheduler"; import { JobsLogItem } from "components/JobItem"; import { createFormErrorMessage } from "utils/errorStatusMessage"; import { ConnectionConfiguration } from "core/domain/connection"; -import { SourceDefinition } from "core/resources/SourceDefinition"; import { useAnalyticsService } from "hooks/services/Analytics/useAnalyticsService"; +import { SourceDefinition } from "core/domain/connector"; type IProps = { onSubmit: (values: { diff --git a/airbyte-webapp/src/pages/SourcesPage/pages/SourceItemPage/SourceItemPage.tsx b/airbyte-webapp/src/pages/SourcesPage/pages/SourceItemPage/SourceItemPage.tsx index 52bdf5c1175f..c91d779147c4 100644 --- a/airbyte-webapp/src/pages/SourcesPage/pages/SourceItemPage/SourceItemPage.tsx +++ b/airbyte-webapp/src/pages/SourcesPage/pages/SourceItemPage/SourceItemPage.tsx @@ -2,7 +2,7 @@ import React, { Suspense, useMemo, useState } from "react"; import { FormattedMessage } from "react-intl"; import { useResource } from "rest-hooks"; -import { Routes } from "pages/routes"; +import { RoutePaths } from "pages/routes"; import { DropDownRow, ImageBlock } from "components"; import PageTitle from "components/PageTitle"; import useRouter from "hooks/useRouter"; @@ -58,12 +58,10 @@ const SourceItemPage: React.FC = () => { workspaceId: workspace.workspaceId, }); - const onClickBack = () => push(Routes.Source); - const breadcrumbsData = [ { name: , - onClick: onClickBack, + onClick: () => push(".."), }, { name: source.name }, ]; @@ -88,17 +86,16 @@ const SourceItemPage: React.FC = () => { ); const onSelect = (data: DropDownRow.IDataItem) => { - if (data.value === "create-new-item") { - push({ - pathname: `${Routes.Source}${Routes.ConnectionNew}`, - state: { sourceId: source.sourceId }, - }); - } else { - push({ - pathname: `${Routes.Source}${Routes.ConnectionNew}`, - state: { destinationId: data.value, sourceId: source.sourceId }, - }); - } + const path = `../${RoutePaths.ConnectionNew}`; + const state = + data.value === "create-new-item" + ? 
{ sourceId: source.sourceId } + : { + destinationId: data.value, + sourceId: source.sourceId, + }; + + push(path, { state }); }; const renderContent = () => { diff --git a/airbyte-webapp/src/pages/SourcesPage/pages/SourceItemPage/components/SourceConnectionTable.tsx b/airbyte-webapp/src/pages/SourcesPage/pages/SourceItemPage/components/SourceConnectionTable.tsx index 4873ee0b7a60..432946e0d24d 100644 --- a/airbyte-webapp/src/pages/SourcesPage/pages/SourceItemPage/components/SourceConnectionTable.tsx +++ b/airbyte-webapp/src/pages/SourcesPage/pages/SourceItemPage/components/SourceConnectionTable.tsx @@ -2,7 +2,7 @@ import React, { useCallback } from "react"; import { useResource } from "rest-hooks"; import { ConnectionTable } from "components/EntityTable"; -import { Routes } from "pages/routes"; +import { RoutePaths } from "pages/routes"; import useRouter from "hooks/useRouter"; import { Connection } from "core/resources/Connection"; import useSyncActions from "components/EntityTable/hooks"; @@ -68,7 +68,7 @@ const SourceConnectionTable: React.FC = ({ connections }) => { ); const clickRow = (source: ITableDataItem) => - push(`${Routes.Connections}/${source.connectionId}`); + push(`../../${RoutePaths.Connections}/${source.connectionId}`); return ( { - const { workspace } = useWorkspace(); - const mainRedirect = workspace.displaySetupWizard - ? Routes.Onboarding - : Routes.Connections; + Preferences = "preferences", + Onboarding = "onboarding", + Connections = "connections", + Destination = "destination", + Source = "source", + Settings = "settings", - return ( - - }> - - - - - - - - - - - - - - - - - {workspace.displaySetupWizard && ( - - - - )} - - - - - - - - ); -}; - -const PreferencesRoutes = () => ( - - - - - - -); + Connection = "connection", + ConnectionNew = "new-connection", + SourceNew = "new-source", + DestinationNew = "new-destination", +} function useDemo() { const { formatMessage } = useIntl(); @@ -114,12 +60,7 @@ function useDemo() { useNotificationService(config.isDemo ? demoNotification : undefined); } -export const Routing: React.FC = () => { - useApiHealthPoll(); - useDemo(); - - const { workspace } = useWorkspace(); - +const useAddAnalyticsContextForWorkspace = (workspace: Workspace): void => { const analyticsContext = useMemo( () => ({ workspace_id: workspace.workspaceId, @@ -129,19 +70,97 @@ export const Routing: React.FC = () => { ); useAnalyticsRegisterValues(analyticsContext); useAnalyticsIdentifyUser(workspace.workspaceId); +}; + +const MainViewRoutes: React.FC<{ workspace: Workspace }> = ({ workspace }) => { + return ( + + + + } + /> + } /> + } + /> + } /> + {workspace.displaySetupWizard ? ( + } + /> + ) : null} + + } + /> + + + ); +}; +const PreferencesRoutes = () => ( + + } /> + } /> + +); + +export const AutoSelectFirstWorkspace: React.FC = () => { + const location = useLocation(); + const workspaces = useListWorkspaces(); + const currentWorkspace = workspaces[0]; + + return ( + + ); +}; + +const RoutingWithWorkspace: React.FC = () => { + const workspace = useCurrentWorkspace(); + useAddAnalyticsContextForWorkspace(workspace); + useApiHealthPoll(); + useDemo(); + + return ( + + {workspace.initialSetupComplete ? ( + + ) : ( + + )} + + ); +}; + +export const Routing: React.FC = () => { + // TODO: Remove this after it is verified there are no problems with current routing + const OldRoutes = useMemo( + () => + Object.values(RoutePaths).map((r) => ( + } /> + )), + [] + ); return ( - - }> - {!workspace.initialSetupComplete ? 
( - - ) : ( - <> - - - - )} - - + + {OldRoutes} + } /> + } /> + ); }; diff --git a/airbyte-webapp/src/services/workspaces/WorkspacesService.tsx b/airbyte-webapp/src/services/workspaces/WorkspacesService.tsx new file mode 100644 index 000000000000..d1e3c0a3fb67 --- /dev/null +++ b/airbyte-webapp/src/services/workspaces/WorkspacesService.tsx @@ -0,0 +1,79 @@ +import React, { useCallback, useContext, useMemo } from "react"; +import { useQueryClient } from "react-query"; +import { useResetter, useResource } from "rest-hooks"; + +import WorkspaceResource from "core/resources/Workspace"; +import useRouter from "hooks/useRouter"; +import { Workspace } from "core/domain/workspace/Workspace"; + +type Context = { + selectWorkspace: (workspaceId?: string | null | Workspace) => void; + exitWorkspace: () => void; +}; + +export const WorkspaceServiceContext = React.createContext( + null +); + +const useSelectWorkspace = (): (( + workspace?: string | null | Workspace +) => void) => { + const queryClient = useQueryClient(); + const resetCache = useResetter(); + const { push } = useRouter(); + + return useCallback( + async (workspace) => { + if (typeof workspace === "object") { + push(workspace?.workspaceId ?? "/"); + } else { + push(workspace ?? "/"); + } + await queryClient.resetQueries(); + resetCache(); + }, + [push, queryClient, resetCache] + ); +}; + +export const WorkspaceServiceProvider: React.FC = ({ children }) => { + const selectWorkspace = useSelectWorkspace(); + + const ctx = useMemo( + () => ({ + selectWorkspace, + exitWorkspace: () => selectWorkspace(""), + }), + [selectWorkspace] + ); + + return ( + + {children} + + ); +}; + +export const useWorkspaceService = (): Context => { + const workspaceService = useContext(WorkspaceServiceContext); + if (!workspaceService) { + throw new Error( + "useWorkspaceService must be used within a WorkspaceServiceProvider." 
+ ); + } + + return workspaceService; +}; + +export const useCurrentWorkspace = (): Workspace => { + const { params } = useRouter(); + const { workspaceId } = params; + + return useResource(WorkspaceResource.detailShape(), { + workspaceId: workspaceId, + }); +}; + +export const useListWorkspaces = (): Workspace[] => { + return useResource(WorkspaceResource.listShape(), {}).workspaces; +}; diff --git a/airbyte-webapp/src/utils/testutils.tsx b/airbyte-webapp/src/utils/testutils.tsx index 471fc9f3c66e..dac2a0e050b7 100644 --- a/airbyte-webapp/src/utils/testutils.tsx +++ b/airbyte-webapp/src/utils/testutils.tsx @@ -1,8 +1,8 @@ import React from "react"; import { render as rtlRender, RenderResult } from "@testing-library/react"; import { ThemeProvider } from "styled-components"; -import { History, createMemoryHistory } from "history"; -import { Router } from "react-router-dom"; +import { History } from "history"; +import { MemoryRouter } from "react-router-dom"; import { IntlProvider } from "react-intl"; import en from "locales/en.json"; @@ -27,16 +27,9 @@ export function render( return ( - + - - {children} - + {children} diff --git a/airbyte-webapp/src/views/CompleteOauthRequest.tsx b/airbyte-webapp/src/views/CompleteOauthRequest.tsx index a06c859e5a74..1cf4f7f94d99 100644 --- a/airbyte-webapp/src/views/CompleteOauthRequest.tsx +++ b/airbyte-webapp/src/views/CompleteOauthRequest.tsx @@ -1,9 +1,9 @@ import React from "react"; import { LoadingPage } from "components"; -import { useResolveRedirect } from "hooks/services/useConnectorAuth"; +import { useResolveNavigate } from "hooks/services/useConnectorAuth"; const CompleteOauthRequest: React.FC = React.memo(() => { - useResolveRedirect(); + useResolveNavigate(); return ; }); diff --git a/airbyte-webapp/src/views/Connection/TransformationForm/TransformationForm.tsx b/airbyte-webapp/src/views/Connection/TransformationForm/TransformationForm.tsx index e25705a0fe2d..4a4bce49a996 100644 --- a/airbyte-webapp/src/views/Connection/TransformationForm/TransformationForm.tsx +++ b/airbyte-webapp/src/views/Connection/TransformationForm/TransformationForm.tsx @@ -9,7 +9,7 @@ import { Transformation } from "core/domain/connection/operation"; import { equal } from "utils/objects"; import { FormikErrors } from "formik/dist/types"; import { useGetService } from "core/servicesProvider"; -import { OperationService } from "../../../core/domain/connection"; +import { OperationService } from "core/domain/connection"; const Content = styled.div` display: flex; diff --git a/airbyte-webapp/src/views/Connector/ServiceForm/ServiceForm.test.tsx b/airbyte-webapp/src/views/Connector/ServiceForm/ServiceForm.test.tsx index 8188cc5d437c..66c37cc179bd 100644 --- a/airbyte-webapp/src/views/Connector/ServiceForm/ServiceForm.test.tsx +++ b/airbyte-webapp/src/views/Connector/ServiceForm/ServiceForm.test.tsx @@ -1,6 +1,7 @@ import React from "react"; import userEvent from "@testing-library/user-event"; -import { findByText, screen, waitFor } from "@testing-library/react"; +import { getByTestId, screen, waitFor } from "@testing-library/react"; +import selectEvent from "react-select-event"; import ServiceForm from "views/Connector/ServiceForm"; import { render } from "utils/testutils"; @@ -200,7 +201,7 @@ describe("Service Form", () => { }); }); - describe.skip("filling service form", () => { + describe("filling service form", () => { let result: ServiceFormValues; let container: HTMLElement; beforeEach(() => { @@ -209,9 +210,9 @@ describe("Service Form", () => { formType="source" 
formValues={{ name: "test-name", serviceType: "test-service-type" }} onSubmit={(values) => (result = values)} - specifications={{ + selectedConnector={{ connectionSpecification: schema, - sourceDefinitionId: "1", + sourceDefinitionId: "test-service-type", documentationUrl: "", }} availableServices={[]} @@ -243,12 +244,11 @@ describe("Service Form", () => { const workTime = container.querySelector( "div[name='connectionConfiguration.workTime']" ); - const priceList = container.querySelector( - "div[data-testid='connectionConfiguration.priceList']" - ); - const addButton = priceList?.querySelector( - "button[data-testid='addItemButton']" + const priceList = getByTestId( + container, + "connectionConfiguration.priceList" ); + const addButton = getByTestId(priceList, "addItemButton"); userEvent.type(name!, "{selectall}{del}name"); userEvent.type(host!, "test-host"); @@ -259,19 +259,17 @@ describe("Service Form", () => { userEvent.type(emails!, "test@test.com{enter}"); userEvent.type(workTime!.querySelector("input")!, "day{enter}"); - await waitFor(() => userEvent.click(addButton!)); + await waitFor(() => userEvent.click(addButton)); const listName = container.querySelector( "input[name='connectionConfiguration.priceList.0.name']" ); const listPrice = container.querySelector( "input[name='connectionConfiguration.priceList.0.price']" ); - const done = priceList?.querySelector( - "button[data-testid='done-button']" - ); + const done = getByTestId(container, "done-button"); userEvent.type(listName!, "test-price-list-name"); userEvent.type(listPrice!, "1"); - await waitFor(() => userEvent.click(done!)); + await waitFor(() => userEvent.click(done)); const submit = container.querySelector("button[type='submit']"); await waitFor(() => userEvent.click(submit!)); @@ -331,16 +329,18 @@ describe("Service Form", () => { "connectionConfiguration.credentials" ); - userEvent.click(credentials); - - const oauth = await findByText(credentials, "oauth"); + const selectContainer = getByTestId( + container, + "connectionConfiguration.credentials" + ); - userEvent.click(oauth); + await selectEvent.select(selectContainer, "oauth", { + container: document.body, + }); const credentialsValue = credentials.querySelector( "input[value='oauth']" ); - const uri = container.querySelector( "input[name='connectionConfiguration.credentials.redirect_uri']" ); @@ -354,11 +354,14 @@ describe("Service Form", () => { "connectionConfiguration.credentials" ); - userEvent.click(credentials); - - const oauth = await findByText(credentials, "oauth"); + const selectContainer = getByTestId( + container, + "connectionConfiguration.credentials" + ); - userEvent.click(oauth); + await selectEvent.select(selectContainer, "oauth", { + container: document.body, + }); const uri = container.querySelector( "input[name='connectionConfiguration.credentials.redirect_uri']" diff --git a/airbyte-webapp/src/views/Connector/ServiceForm/components/Controls/Instruction.tsx b/airbyte-webapp/src/views/Connector/ServiceForm/components/Controls/Instruction.tsx index 96ab25cae1f4..69c15a64d864 100644 --- a/airbyte-webapp/src/views/Connector/ServiceForm/components/Controls/Instruction.tsx +++ b/airbyte-webapp/src/views/Connector/ServiceForm/components/Controls/Instruction.tsx @@ -3,11 +3,10 @@ import { FormattedMessage } from "react-intl"; import styled from "styled-components"; import { useToggle } from "react-use"; -import { SourceDefinition } from "core/resources/SourceDefinition"; -import { DestinationDefinition } from "core/resources/DestinationDefinition"; 
import useDocumentation from "hooks/services/useDocumentation"; import { SideView } from "components/SideView"; import { Markdown } from "components/Markdown"; +import { DestinationDefinition, SourceDefinition } from "core/domain/connector"; type IProps = { selectedService: SourceDefinition | DestinationDefinition; diff --git a/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/FormSection.tsx b/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/FormSection.tsx index a9e6d3e30573..dd319fe8a942 100644 --- a/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/FormSection.tsx +++ b/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/FormSection.tsx @@ -87,14 +87,10 @@ const FormSection: React.FC<{ ); return ( - <> + {isAuthSection && } - - + + ); })} diff --git a/airbyte-webapp/src/views/Connector/ServiceForm/index.stories.tsx b/airbyte-webapp/src/views/Connector/ServiceForm/index.stories.tsx index 99062a2fcd49..68dbb3752604 100644 --- a/airbyte-webapp/src/views/Connector/ServiceForm/index.stories.tsx +++ b/airbyte-webapp/src/views/Connector/ServiceForm/index.stories.tsx @@ -5,7 +5,7 @@ import { ContentCard } from "components"; const TempConnector = { name: "Service", - documentationUrl: "http://service.com", + documentationUrl: "", sourceDefinitionId: "serviceId", dockerRepository: "", dockerImageTag: "", @@ -19,6 +19,10 @@ export default { parameters: { actions: { argTypesRegex: "^on.*" } }, args: { formType: "source", + formValues: { + serviceType: TempConnector.sourceDefinitionId, + }, + onSubmit: (v) => console.log(v), availableServices: [TempConnector], }, } as ComponentMeta; @@ -29,8 +33,14 @@ const Template: ComponentStory = (args) => { args.selectedConnector && !(args.selectedConnector as any).sourceDefinitionId ) { - (args.selectedConnector as any).sourceDefinitionId = ""; + (args.selectedConnector as any).sourceDefinitionId = + TempConnector.sourceDefinitionId; } + + if (args.selectedConnector?.documentationUrl) { + args.selectedConnector.documentationUrl = ""; + } + return ( @@ -40,8 +50,8 @@ const Template: ComponentStory = (args) => { export const Common = Template.bind({}); Common.args = { - // @ts-ignore selectedConnector: { + ...TempConnector, connectionSpecification: JSON.parse(`{ "$schema": "http://json-schema.org/draft-07/schema#", "title": "BigQuery Destination Spec", @@ -119,8 +129,7 @@ Common.args = { export const Oneof = Template.bind({}); Oneof.args = { selectedConnector: { - sourceDefinitionId: "", - documentationUrl: "", + ...TempConnector, connectionSpecification: JSON.parse(`{ "$schema": "http://json-schema.org/draft-07/schema#", "title": "MSSQL Source Spec", diff --git a/airbyte-webapp/src/views/common/ErrorOccurredView.tsx b/airbyte-webapp/src/views/common/ErrorOccurredView.tsx index 4e8febb902c3..fba56ac25f5e 100644 --- a/airbyte-webapp/src/views/common/ErrorOccurredView.tsx +++ b/airbyte-webapp/src/views/common/ErrorOccurredView.tsx @@ -1,9 +1,8 @@ import React from "react"; import styled from "styled-components"; -import ContentCard from "components/ContentCard"; import BaseClearView from "components/BaseClearView"; -import { H4 } from "components"; +import { H4, ContentCard } from "components"; const Content = styled(ContentCard)` width: 100%; @@ -13,11 +12,13 @@ const Content = styled(ContentCard)` const ErrorOccurredView: React.FC<{ message: React.ReactNode }> = ({ message, + children, }) => { return (

        {message}
+       {children}
    ); diff --git a/airbyte-webapp/src/views/common/ResorceNotFoundErrorBoundary.tsx b/airbyte-webapp/src/views/common/ResorceNotFoundErrorBoundary.tsx new file mode 100644 index 000000000000..4aca73f07336 --- /dev/null +++ b/airbyte-webapp/src/views/common/ResorceNotFoundErrorBoundary.tsx @@ -0,0 +1,42 @@ +import React from "react"; +import { FormattedMessage } from "react-intl"; + +import { CommonRequestError } from "core/request/CommonRequestError"; + +type BoundaryState = { hasError: boolean; message?: React.ReactNode | null }; + +const initialState: BoundaryState = { + hasError: false, + message: null, +}; + +export class ResourceNotFoundErrorBoundary extends React.Component< + { errorComponent: React.ReactElement }, + BoundaryState +> { + static getDerivedStateFromError(error: CommonRequestError): BoundaryState { + if (error.status === 422) { + return { + hasError: true, + message: , + }; + } else { + throw error; + } + } + + state = initialState; + + reset = (): void => { + this.setState(initialState); + }; + + render(): React.ReactNode { + return this.state.hasError + ? React.cloneElement(this.props.errorComponent, { + message: this.state.message, + onReset: this.reset, + }) + : this.props.children; + } +} diff --git a/airbyte-webapp/src/views/common/StartOverErrorView.tsx b/airbyte-webapp/src/views/common/StartOverErrorView.tsx new file mode 100644 index 000000000000..0d237498d05d --- /dev/null +++ b/airbyte-webapp/src/views/common/StartOverErrorView.tsx @@ -0,0 +1,34 @@ +import React from "react"; +import { FormattedMessage } from "react-intl"; +import styled from "styled-components"; + +import useRouter from "hooks/useRouter"; +import { ErrorOccurredView } from "views/common/ErrorOccurredView"; +import { Button } from "components"; + +const ResetSection = styled.div` + margin-top: 30px; +`; + +export const StartOverErrorView: React.FC<{ + message?: string; + onReset?: () => void; +}> = ({ message, onReset }) => { + const { push } = useRouter(); + return ( + } + > + + + + + ); +}; diff --git a/airbyte-webapp/src/views/layout/MainView/MainView.tsx b/airbyte-webapp/src/views/layout/MainView/MainView.tsx index 7adae631b0e3..1bb390fbbe07 100644 --- a/airbyte-webapp/src/views/layout/MainView/MainView.tsx +++ b/airbyte-webapp/src/views/layout/MainView/MainView.tsx @@ -1,6 +1,8 @@ -import React from "react"; +import React, { Suspense } from "react"; import styled from "styled-components"; + import SideBar from "views/layout/SideBar"; +import LoadingPage from "components/LoadingPage"; const MainContainer = styled.div` width: 100%; @@ -19,7 +21,9 @@ const Content = styled.div` const MainView: React.FC = (props) => ( - {props.children} + + }>{props.children} + ); diff --git a/airbyte-webapp/src/views/layout/SideBar/SideBar.tsx b/airbyte-webapp/src/views/layout/SideBar/SideBar.tsx index bd12694d43c5..115f562779b4 100644 --- a/airbyte-webapp/src/views/layout/SideBar/SideBar.tsx +++ b/airbyte-webapp/src/views/layout/SideBar/SideBar.tsx @@ -5,9 +5,9 @@ import { faRocket } from "@fortawesome/free-solid-svg-icons"; import { FormattedMessage } from "react-intl"; import { NavLink } from "react-router-dom"; -import { Routes } from "pages/routes"; +import { RoutePaths } from "pages/routes"; import { useConfig } from "config"; -import useWorkspace from "hooks/services/useWorkspace"; +import { useCurrentWorkspace } from "hooks/services/useWorkspace"; import { Link } from "components"; import Version from "components/Version"; @@ -91,7 +91,7 @@ const HelpIcon = styled(FontAwesomeIcon)` const SideBar: 
React.FC = () => { const config = useConfig(); - const { workspace } = useWorkspace(); + const workspace = useCurrentWorkspace(); return ( @@ -99,8 +99,8 @@ const SideBar: React.FC = () => { logo @@ -108,7 +108,7 @@ const SideBar: React.FC = () => { {workspace.displaySetupWizard ? (
-          [menu-item JSX lost in extraction: link target referenced a Routes.* path]
+          [menu-item JSX lost in extraction: link target now references the matching RoutePaths.* path]
        ) : null}
          [three further menu items follow with the same Routes.* → RoutePaths.* update; their JSX was likewise lost in extraction]
  • - location.pathname.startsWith(Routes.Settings) - } + to={RoutePaths.Settings} + // isActive={(_, location) => + // location.pathname.startsWith(RoutePaths.Settings) + // } > diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index c6d44144d85e..e996f9ca01e1 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -23,7 +23,7 @@ ENV APPLICATION airbyte-workers WORKDIR /app # Move worker app -ADD bin/${APPLICATION}-0.33.12-alpha.tar /app +ADD bin/${APPLICATION}-0.34.1-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.33.12-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.34.1-alpha/bin/${APPLICATION}"] diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/process/KubePodProcess.java b/airbyte-workers/src/main/java/io/airbyte/workers/process/KubePodProcess.java index 7464422c207b..931386a52274 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/process/KubePodProcess.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/process/KubePodProcess.java @@ -117,6 +117,7 @@ public class KubePodProcess extends Process { // This variable should be set in functions where the pod is forcefully terminated. See // getReturnCode() for more info. private final AtomicBoolean wasKilled = new AtomicBoolean(false); + private final AtomicBoolean wasClosed = new AtomicBoolean(false); private final OutputStream stdin; private InputStream stdout; @@ -517,6 +518,14 @@ public Info info() { * implementation with OS processes and resources, which are automatically reaped by the OS. */ private void close() { + final boolean previouslyClosed = wasClosed.getAndSet(true); + + // short-circuit if close was already called, so we don't re-offer ports multiple times + // since the offer call is non-atomic + if (previouslyClosed) { + return; + } + if (this.stdin != null) { Exceptions.swallow(this.stdin::close); } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java b/airbyte-workers/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java index 817041d7fa6b..9ac221321fce 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java @@ -101,6 +101,7 @@ public Process create(final String jobId, try { // used to differentiate source and destination processes with the same id and attempt final String podName = createPodName(imageName, jobId, attempt); + LOGGER.info("Attempting to start pod = {}", podName); final int stdoutLocalPort = KubePortManagerSingleton.getInstance().take(); LOGGER.info("{} stdoutLocalPort = {}", podName, stdoutLocalPort); @@ -173,7 +174,6 @@ protected static String createPodName(final String fullImagePath, final String j imageName = imageName.substring(extra); podName = imageName + "-" + suffix; } - System.out.println(podName); final Matcher m = ALPHABETIC.matcher(podName); // Since we add sync-UUID as a suffix a couple of lines up, there will always be a substring // starting with an alphabetic character. 
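The `wasClosed` guard added to `KubePodProcess.close()` above works because `AtomicBoolean#getAndSet` is atomic: exactly one caller observes the previous value `false` and proceeds, so the pod's ports are offered back to the pool exactly once even when `close()` races with itself. A minimal self-contained sketch of the same idempotent-close pattern follows; the `PooledProcess`/`IdempotentCloseSketch` names and the single-port pool are illustrative, not Airbyte's actual classes:

```java
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;

// Sketch of an idempotent close: the pooled port must be re-offered exactly once,
// no matter how many times (or from how many threads) close() is invoked.
class PooledProcess implements AutoCloseable {

  private final AtomicBoolean wasClosed = new AtomicBoolean(false);
  private final BlockingQueue<Integer> portPool;
  private final int port;

  PooledProcess(final BlockingQueue<Integer> portPool, final int port) {
    this.portPool = portPool;
    this.port = port;
  }

  @Override
  public void close() {
    // getAndSet(true) is atomic: only the first caller sees `false` and proceeds.
    // Every later (or concurrent) call short-circuits here instead of re-offering the port.
    if (wasClosed.getAndSet(true)) {
      return;
    }
    portPool.offer(port);
  }
}

public class IdempotentCloseSketch {

  public static void main(final String[] args) {
    final BlockingQueue<Integer> pool = new LinkedBlockingQueue<>();
    final PooledProcess process = new PooledProcess(pool, 9877);
    process.close();
    process.close(); // no-op: the port is not offered a second time
    System.out.println("ports returned to pool: " + pool.size()); // prints 1
  }
}
```

This mirrors why the integration test below can assert that the number of ports taken equals the number available before the run: without the guard, each extra `exitValue()`-triggered `close()` would offer the same ports into the pool again.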
diff --git a/airbyte-workers/src/test-integration/java/io/airbyte/workers/process/KubePodProcessIntegrationTest.java b/airbyte-workers/src/test-integration/java/io/airbyte/workers/process/KubePodProcessIntegrationTest.java index 733b53773972..27ebf78542c5 100644 --- a/airbyte-workers/src/test-integration/java/io/airbyte/workers/process/KubePodProcessIntegrationTest.java +++ b/airbyte-workers/src/test-integration/java/io/airbyte/workers/process/KubePodProcessIntegrationTest.java @@ -27,6 +27,7 @@ import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import org.apache.commons.lang.RandomStringUtils; import org.junit.jupiter.api.AfterEach; @@ -90,6 +91,50 @@ public void testSuccessfulSpawning() throws Exception { assertEquals(0, process.exitValue()); } + @Test + public void testPortsReintroducedIntoPoolOnlyOnce() throws Exception { + final var availablePortsBefore = KubePortManagerSingleton.getInstance().getNumAvailablePorts(); + + // run a finite process + final Process process = getProcess("echo hi; sleep 1; echo hi2"); + process.waitFor(); + + // the pod should be dead and in a good state + assertFalse(process.isAlive()); + + // run a background process to continuously consume available ports + final var portsTaken = new ArrayList(); + final var executor = Executors.newSingleThreadExecutor(); + + executor.submit(() -> { + try { + while (true) { + portsTaken.add(KubePortManagerSingleton.getInstance().take()); + } + } catch (InterruptedException e) { + e.printStackTrace(); + throw new RuntimeException(e); + } + }); + + // repeatedly call exitValue (and therefore the close method) + for (int i = 0; i < 100; i++) { + // if exitValue no longer calls close in the future this test will fail and need to be updated. + process.exitValue(); + } + + // stop taking from available ports + executor.shutdownNow(); + + // prior to fixing this race condition, the close method would offer ports every time it was called. + // without the race condition, we should have only been able to pull each of the originally + // available ports once + assertEquals(availablePortsBefore, portsTaken.size()); + + // release ports for next tests + portsTaken.forEach(KubePortManagerSingleton.getInstance()::offer); + } + @Test public void testSuccessfulSpawningWithQuotes() throws Exception { // start a finite process diff --git a/buildSrc/src/main/groovy/airbyte-integration-test-java.gradle b/buildSrc/src/main/groovy/airbyte-integration-test-java.gradle index 9d3bc6b98602..e7865a585445 100644 --- a/buildSrc/src/main/groovy/airbyte-integration-test-java.gradle +++ b/buildSrc/src/main/groovy/airbyte-integration-test-java.gradle @@ -45,6 +45,9 @@ class AirbyteIntegrationTestJavaPlugin implements Plugin { maxHeapSize = '3g' mustRunAfter project.test + + // This is needed to make the destination-snowflake tests succeed - https://github.com/snowflakedb/snowflake-jdbc/issues/589#issuecomment-983944767 + jvmArgs = ["--add-opens=java.base/java.nio=ALL-UNNAMED"] } // make sure we create the integrationTest task once in case a standard source test was already initialized diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index df8548ccad58..3039ae5ac840 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.0 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. 
They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "0.33.12-alpha" +appVersion: "0.34.1-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index e5d4e0caff7f..5a3aec4e269a 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -29,7 +29,7 @@ | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.33.12-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.34.1-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.service.type` | The service type to use for the webapp service | `ClusterIP` | | `webapp.service.port` | The service port to expose the webapp on | `80` | @@ -55,7 +55,7 @@ | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.33.12-alpha` | +| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.34.1-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.resources.limits` | The resources limits for the scheduler container | `{}` | | `scheduler.resources.requests` | The requested resources for the scheduler container | `{}` | @@ -86,7 +86,7 @@ | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.33.12-alpha` | +| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.34.1-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | | `server.livenessProbe.initialDelaySeconds` | Initial delay seconds for livenessProbe | `30` | @@ -120,7 +120,7 @@ | `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.33.12-alpha` | +| `worker.image.tag` | The airbyte worker image tag. 
Defaults to the chart's AppVersion | `0.34.1-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | | `worker.livenessProbe.initialDelaySeconds` | Initial delay seconds for livenessProbe | `30` | diff --git a/charts/airbyte/templates/_helpers.tpl b/charts/airbyte/templates/_helpers.tpl index d835cb58efcf..87daf486b855 100644 --- a/charts/airbyte/templates/_helpers.tpl +++ b/charts/airbyte/templates/_helpers.tpl @@ -160,9 +160,9 @@ Add environment variables to configure minio */}} {{- define "airbyte.minio.endpoint" -}} {{- if .Values.logs.minio.enabled -}} - {{- printf "http://%s:%s" (include "airbyte.minio.fullname" .) "9000" -}} + {{- printf "http://%s:%d" (include "airbyte.minio.fullname" .) 9000 -}} {{- else if .Values.logs.externalMinio.enabled -}} - {{- printf "http://%s:%s" .Values.logs.externalMinio.host .Values.logs.externalMinio.port -}} + {{- printf "http://%s:%d" .Values.logs.externalMinio.host .Values.logs.externalMinio.port -}} {{- else -}} {{- printf "" -}} {{- end -}} diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index e5b72983dc46..60bd9279ed10 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -43,7 +43,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.33.12-alpha + tag: 0.34.1-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -140,7 +140,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.33.12-alpha + tag: 0.34.1-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -245,7 +245,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.33.12-alpha + tag: 0.34.1-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -357,7 +357,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.33.12-alpha + tag: 0.34.1-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -549,3 +549,9 @@ logs: gcs: bucket: "" credentials: "" + +minio: + accessKey: + password: minio + secretKey: + password: minio123 diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index 5b4ec92abf92..c237c61a1cba 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -103,6 +103,7 @@ * [Notion](integrations/sources/notion.md) * [Okta](integrations/sources/okta.md) * [OneSignal](integrations/sources/onesignal.md) + * [OpenWeather](integrations/sources/openweather.md) * [Oracle DB](integrations/sources/oracle.md) * [Oracle Peoplesoft](integrations/sources/oracle-peoplesoft.md) * [Oracle Siebel CRM](integrations/sources/oracle-siebel-crm.md) diff --git a/docs/integrations/README.md b/docs/integrations/README.md index 550fae22fad6..0fd81cf413a8 100644 --- a/docs/integrations/README.md +++ b/docs/integrations/README.md @@ -79,6 +79,7 @@ Airbyte uses a grading system for connectors to help users understand what to ex | [Notion](sources/notion.md) | Alpha | | [Okta](sources/okta.md) | Beta | | [OneSignal](sources/onesignal.md) | Alpha | +| [OpenWeather](sources/openweather.md) | Alpha | | [Oracle DB](sources/oracle.md) | Certified | | [Oracle PeopleSoft](sources/oracle-peoplesoft.md) | Beta | | [Oracle Siebel CRM](sources/oracle-siebel-crm.md) | Beta | diff --git a/docs/integrations/destinations/bigquery.md b/docs/integrations/destinations/bigquery.md index 99873e08b91c..4a534c30a27b 
100644 --- a/docs/integrations/destinations/bigquery.md +++ b/docs/integrations/destinations/bigquery.md @@ -155,6 +155,7 @@ Therefore, Airbyte BigQuery destination will convert any invalid characters into | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.5.1 | 2021-12-16 | [\#8816](https://github.com/airbytehq/airbyte/issues/8816) | Update dataset locations | | 0.5.0 | 2021-10-26 | [\#7240](https://github.com/airbytehq/airbyte/issues/7240) | Output partitioned/clustered tables | | 0.4.1 | 2021-10-04 | [\#6733](https://github.com/airbytehq/airbyte/issues/6733) | Support dataset starting with numbers | | 0.4.0 | 2021-08-26 | [\#5296](https://github.com/airbytehq/airbyte/issues/5296) | Added GCS Staging uploading option | @@ -169,6 +170,7 @@ Therefore, Airbyte BigQuery destination will convert any invalid characters into | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.11 | 2021-12-16 | [\#8816](https://github.com/airbytehq/airbyte/issues/8816) | Update dataset locations | | 0.1.10 | 2021-11-09 | [\#7804](https://github.com/airbytehq/airbyte/pull/7804) | handle null values in fields described by a $ref definition | | 0.1.9 | 2021-11-08 | [\#7736](https://github.com/airbytehq/airbyte/issues/7736) | Fixed the handling of ObjectNodes with $ref definition key | | 0.1.8 | 2021-10-27 | [\#7413](https://github.com/airbytehq/airbyte/issues/7413) | Fixed DATETIME conversion for BigQuery | diff --git a/docs/integrations/destinations/redshift.md b/docs/integrations/destinations/redshift.md index 36b7d3cab067..3437d4079a57 100644 --- a/docs/integrations/destinations/redshift.md +++ b/docs/integrations/destinations/redshift.md @@ -85,6 +85,12 @@ Provide the required S3 info. * **Part Size** * Affects the size limit of an individual Redshift table. Optional. Increase this if syncing tables larger than 100GB. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default table limit of 100GB. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care. +Optional parameters: +* **Bucket Path** + * The directory within the S3 bucket to place the staging data. For example, if you set this to `yourFavoriteSubdirectory`, we will place the staging data inside `s3://yourBucket/yourFavoriteSubdirectory`. If not provided, defaults to the root directory. +* **Purge Staging Data** + * Whether to delete the staging files from S3 after completing the sync. Specifically, the connector will create CSV files named `bucketPath/namespace/streamName/syncDate_epochMillis_randomUuid.csv` containing three columns (`ab_id`, `data`, `emitted_at`). Normally these files are deleted after the `COPY` command completes; if you want to keep them for other purposes, set `purge_staging_data` to `false`. 
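For illustration only, here is a minimal Java sketch of how a staging object key following the pattern above could be assembled. The bucket, path, namespace, and stream names are hypothetical, and the exact sync-date format may differ from what the connector actually produces:

```java
import java.time.LocalDate;
import java.util.UUID;

public class StagingKeySketch {

  public static void main(String[] args) {
    // Hypothetical values; in the connector these come from the destination config and the stream.
    final String bucketPath = "yourFavoriteSubdirectory";
    final String namespace = "public";
    final String streamName = "users";

    // Pattern described above: bucketPath/namespace/streamName/syncDate_epochMillis_randomUuid.csv
    final String key = String.format("%s/%s/%s/%s_%d_%s.csv",
        bucketPath, namespace, streamName, LocalDate.now(), System.currentTimeMillis(), UUID.randomUUID());

    System.out.println("s3://yourBucket/" + key);
  }
}
```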
+ ## Notes about Redshift Naming Conventions From [Redshift Names & Identifiers](https://docs.aws.amazon.com/redshift/latest/dg/r_names.html): @@ -118,6 +124,8 @@ All Redshift connections are encrypted using SSL | Version | Date | Pull Request | Subject | | :------ | :-------- | :----- | :------ | +| 0.3.23 | 2021-12-16 | [\#8855](https://github.com/airbytehq/airbyte/pull/8855) | Add `purgeStagingData` option to enable/disable deleting the staging data | +| 0.3.22 | 2021-12-15 | [#8607](https://github.com/airbytehq/airbyte/pull/8607) | Accept a path for the staging data | | 0.3.21 | 2021-12-10 | [#8562](https://github.com/airbytehq/airbyte/pull/8562) | Moving classes around for better dependency management | | 0.3.20 | 2021-11-08 | [#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | | 0.3.19 | 2021-10-21 | [7234](https://github.com/airbytehq/airbyte/pull/7234) | Allow SSL traffic only | diff --git a/docs/integrations/destinations/s3.md b/docs/integrations/destinations/s3.md index e23b6777c363..78ae79ba8c50 100644 --- a/docs/integrations/destinations/s3.md +++ b/docs/integrations/destinations/s3.md @@ -223,6 +223,7 @@ Under the hood, an Airbyte data stream in Json schema is first converted to an A | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.2.0 | 2021-12-15 | [\#8607](https://github.com/airbytehq/airbyte/pull/8607) | Change the output filename for CSV files - it's now `bucketPath/namespace/streamName/timestamp_epochMillis_randomUuid.csv` | | 0.1.16 | 2021-12-10 | [\#8562](https://github.com/airbytehq/airbyte/pull/8562) | Swap dependencies with destination-jdbc. | | 0.1.15 | 2021-12-03 | [\#8501](https://github.com/airbytehq/airbyte/pull/8501) | Remove excessive logging for Avro and Parquet invalid date strings. | | 0.1.14 | 2021-11-09 | [\#7732](https://github.com/airbytehq/airbyte/pull/7732) | Support timestamp in Avro and Parquet | diff --git a/docs/integrations/destinations/snowflake.md b/docs/integrations/destinations/snowflake.md index b84bbc84f841..74e4db39efbb 100644 --- a/docs/integrations/destinations/snowflake.md +++ b/docs/integrations/destinations/snowflake.md @@ -152,6 +152,8 @@ By default, Airbyte uses batches of `INSERT` commands to add data to a temporary Internal named stages are storage location objects within a Snowflake database/schema. Because they are database objects, the same security permissions apply as with any other database objects. No need to provide additional properties for internal staging +**Operating on a stage also requires the USAGE privilege on the parent database and schema.** + ### AWS S3 For AWS S3, you will need to create a bucket and provide credentials to access the bucket. We recommend creating a bucket that is only used for Airbyte to stage data to Snowflake. Airbyte needs read/write access to interact with this bucket. @@ -194,6 +196,7 @@ Finally, you need to add read/write permissions to your bucket with that email. 
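If you want to sanity-check that the provided credentials really do grant the read/write access described above before running a sync, a simple put/get/delete round trip is enough. Below is a sketch using the AWS SDK for Java v1; the bucket name is a placeholder and this snippet is not part of the connector:

```java
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

public class BucketAccessCheck {

  public static void main(String[] args) {
    // Uses credentials from the default provider chain (environment variables, profile, etc.).
    final AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
    final String bucket = "your-airbyte-staging-bucket"; // placeholder bucket name
    final String key = "airbyte-access-check.tmp";

    s3.putObject(bucket, key, "ok");                       // verifies write access
    System.out.println(s3.getObjectAsString(bucket, key)); // verifies read access
    s3.deleteObject(bucket, key);                          // verifies delete access and cleans up
  }
}
```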
| Version | Date | Pull Request | Subject | | :------ | :-------- | :----- | :------ | +| 0.3.21 | 2021-12-15 | [#8781](https://github.com/airbytehq/airbyte/pull/8781) | Updated check method to verify permissions to create/drop stage for internal staging; compatibility fix for Java 17 | | 0.3.20 | 2021-12-10 | [#8562](https://github.com/airbytehq/airbyte/pull/8562) | Moving classes around for better dependency management; compatibility fix for Java 17 | | 0.3.19 | 2021-12-06 | [#8528](https://github.com/airbytehq/airbyte/pull/8528) | Set Internal Staging as default choice | | 0.3.18 | 2021-11-26 | [#8253](https://github.com/airbytehq/airbyte/pull/8253) | Snowflake Internal Staging Support | diff --git a/docs/integrations/getting-started/destination-redshift.md b/docs/integrations/getting-started/destination-redshift.md index c62d584bdac0..ae59b0eeff95 100644 --- a/docs/integrations/getting-started/destination-redshift.md +++ b/docs/integrations/getting-started/destination-redshift.md @@ -1,4 +1,4 @@ -# Getting Started: Destination Redshift +# Getting Started: Destination Redshift ## Requirements @@ -42,6 +42,10 @@ Provide the required S3 info. * **Part Size** * Affects the size limit of an individual Redshift table. Optional. Increase this if syncing tables larger than 100GB. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default table limit of 100GB. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care. +Optional parameters: +* **Bucket Path** + * The directory within the S3 bucket to place the staging data. For example, if you set this to `yourFavoriteSubdirectory`, staging data will be placed inside `s3://yourBucket/yourFavoriteSubdirectory`. If not provided, defaults to the root directory. 
+ ## Notes about Redshift Naming Conventions From [Redshift Names & Identifiers](https://docs.aws.amazon.com/redshift/latest/dg/r_names.html): diff --git a/docs/integrations/sources/amazon-seller-partner.md b/docs/integrations/sources/amazon-seller-partner.md index 4539c6fe9a1f..43b2f3bae58c 100644 --- a/docs/integrations/sources/amazon-seller-partner.md +++ b/docs/integrations/sources/amazon-seller-partner.md @@ -24,6 +24,7 @@ This source is capable of syncing the following streams: * [Orders](https://github.com/amzn/selling-partner-api-docs/blob/main/references/orders-api/ordersV0.md) \(incremental\) * [VendorDirectFulfillmentShipping](https://github.com/amzn/selling-partner-api-docs/blob/main/references/vendor-direct-fulfillment-shipping-api/vendorDirectFulfillmentShippingV1.md) * [Seller Feedback Report](https://github.com/amzn/selling-partner-api-docs/blob/main/references/reports-api/reporttype-values.md#performance-reports) +* [Brand Analytics Search Terms Report](https://github.com/amzn/selling-partner-api-docs/blob/main/references/reports-api/reporttype-values.md#brand-analytics-reports) ## Getting started @@ -63,6 +64,7 @@ Information about rate limits you may find [here](https://github.com/amzn/sellin | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| `0.2.6` | 2021-12-10 | [\#8179](https://github.com/airbytehq/airbyte/pull/8179) | Add GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT report | | `0.2.5` | 2021-12-06 | [\#8425](https://github.com/airbytehq/airbyte/pull/8425) | Update title, description fields in spec | | `0.2.4` | 2021-11-08 | [\#8021](https://github.com/airbytehq/airbyte/pull/8021) | Added GET_SELLER_FEEDBACK_DATA report with incremental sync capability | | `0.2.3` | 2021-11-08 | [\#7828](https://github.com/airbytehq/airbyte/pull/7828) | Remove datetime format from all streams | diff --git a/docs/integrations/sources/bing-ads.md b/docs/integrations/sources/bing-ads.md index 8ce0f2515211..4631f0970b07 100644 --- a/docs/integrations/sources/bing-ads.md +++ b/docs/integrations/sources/bing-ads.md @@ -77,6 +77,7 @@ Be aware that `refresh token` will expire in 90 days. You need to repeat auth pr | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.2 | 2021-12-14 | [8429](https://github.com/airbytehq/airbyte/pull/8429) | Update titles and descriptions | | 0.1.1 | 2021-08-31 | [5750](https://github.com/airbytehq/airbyte/pull/5750) | Added reporting streams\) | | 0.1.0 | 2021-07-22 | [4911](https://github.com/airbytehq/airbyte/pull/4911) | Initial release supported core streams \(Accounts, Campaigns, Ads, AdGroups\) | diff --git a/docs/integrations/sources/clickhouse.md b/docs/integrations/sources/clickhouse.md index ce0524144ea9..f17e00fdaeae 100644 --- a/docs/integrations/sources/clickhouse.md +++ b/docs/integrations/sources/clickhouse.md @@ -78,6 +78,7 @@ Using this feature requires additional configuration, when creating the source. | Version | Date | Pull Request | Subject | |:--------| :--- |:---------------------------------------------------------|:-----------------------------------------------------------------| +| 0.1.6 | 2021-12-15 | [\#8429](https://github.com/airbytehq/airbyte/pull/8429) | Update titles and descriptions | | 0.1.5 | 2021-12-01 | [\#8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | | 0.1.4 | 20.10.2021 | [\#7327](https://github.com/airbytehq/airbyte/pull/7327) | Added support for connection via SSH tunnel(aka Bastion server). 
| | 0.1.3 | 20.10.2021 | [\#7127](https://github.com/airbytehq/airbyte/pull/7127) | Added SSL connections support. | @@ -90,4 +91,4 @@ Using this feature requires additional configuration, when creating the source. |:---| :--- |:---------------------------------------------------------|:---------------------------------------------------------------------------| | 0.1.2 | 2021-12-01 | [\#8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | | 0.1.1 | 20.10.2021 | [\#7327](https://github.com/airbytehq/airbyte/pull/7327) | Added support for connection via SSH tunnel(aka Bastion server). | -| 0.1.0 | 20.10.2021 | [\#7127](https://github.com/airbytehq/airbyte/pull/7127) | Added source-clickhouse-strict-encrypt that supports SSL connections only. | \ No newline at end of file +| 0.1.0 | 20.10.2021 | [\#7127](https://github.com/airbytehq/airbyte/pull/7127) | Added source-clickhouse-strict-encrypt that supports SSL connections only. | diff --git a/docs/integrations/sources/drift.md b/docs/integrations/sources/drift.md index a67f3d977ffa..7ed072f00b68 100644 --- a/docs/integrations/sources/drift.md +++ b/docs/integrations/sources/drift.md @@ -51,5 +51,6 @@ The Drift connector should not run into Drift API limitations under normal usage | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.2.5 | 2021-12-14 | [8429](https://github.com/airbytehq/airbyte/pull/8429) | Updated titles and descriptions | | 0.2.3 | 2021-10-25 | [7337](https://github.com/airbytehq/airbyte/pull/7337) | Added support of `OAuth 2.0` authorisation option | -| `0.2.3` | 2021-10-27 | [7247](https://github.com/airbytehq/airbyte/pull/7247) | Migrate to the CDK | \ No newline at end of file +| `0.2.3` | 2021-10-27 | [7247](https://github.com/airbytehq/airbyte/pull/7247) | Migrate to the CDK | diff --git a/docs/integrations/sources/facebook-marketing.md b/docs/integrations/sources/facebook-marketing.md index 285114e1868d..e54659560eb2 100644 --- a/docs/integrations/sources/facebook-marketing.md +++ b/docs/integrations/sources/facebook-marketing.md @@ -96,6 +96,9 @@ As a summary, custom insights allows to replicate only some fields, resulting in | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.2.29 | 2021-12-17 | [8649](https://github.com/airbytehq/airbyte/pull/8649) | Retrieve ad_creatives image as encoded data | +| 0.2.28 | 2021-12-13 | [8742](https://github.com/airbytehq/airbyte/pull/8742) | Fix for schema generation related to "breakdown" fields | +| 0.2.27 | 2021-11-29 | [8257](https://github.com/airbytehq/airbyte/pull/8257) | Add fields to Campaign stream | | 0.2.26 | 2021-11-19 | [7855](https://github.com/airbytehq/airbyte/pull/7855) | Add Video stream | | 0.2.25 | 2021-11-12 | [7904](https://github.com/airbytehq/airbyte/pull/7904) | Implement retry logic for async jobs | | 0.2.24 | 2021-11-09 | [7744](https://github.com/airbytehq/airbyte/pull/7744) | Fix fail when async job takes too long | diff --git a/docs/integrations/sources/google-analytics-v4.md b/docs/integrations/sources/google-analytics-v4.md index 4804334945eb..a68f12dc2f6c 100644 --- a/docs/integrations/sources/google-analytics-v4.md +++ b/docs/integrations/sources/google-analytics-v4.md @@ -132,6 +132,7 @@ The Google Analytics connector should not run into Google Analytics API limitati | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.14 | 2021-12-09 | [8656](https://github.com/airbytehq/airbyte/pull/8656) | Fix date-format in schemas |
| 0.1.13 | 2021-12-09 | [8676](https://github.com/airbytehq/airbyte/pull/8676) | Fix `window_in_days` validation issue | | 0.1.12 | 2021-12-03 | [8175](https://github.com/airbytehq/airbyte/pull/8175) | Fix validation of unknown metric(s) or dimension(s) error | | 0.1.11 | 2021-11-30 | [8264](https://github.com/airbytehq/airbyte/pull/8264) | Corrected date range | diff --git a/docs/integrations/sources/harvest.md b/docs/integrations/sources/harvest.md index 1e9f59c2511e..6349fcaf35fd 100644 --- a/docs/integrations/sources/harvest.md +++ b/docs/integrations/sources/harvest.md @@ -69,6 +69,7 @@ See [docs](https://help.getharvest.com/api-v2/authentication-api/authentication/ | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.8 | 2021-12-14 | [8429](https://github.com/airbytehq/airbyte/pull/8429) | Update titles and descriptions | | 0.1.6 | 2021-11-14 | [7952](https://github.com/airbytehq/airbyte/pull/7952) | Implement OAuth 2.0 support | | 0.1.5 | 2021-09-28 | [5747](https://github.com/airbytehq/airbyte/pull/5747) | Update schema date-time fields | | 0.1.4 | 2021-06-22 | [5701](https://github.com/airbytehq/airbyte/pull/5071) | Harvest normalization failure: fixing the schemas | diff --git a/docs/integrations/sources/hubspot.md b/docs/integrations/sources/hubspot.md index 591371186e7a..d1c6ccb7b5c2 100644 --- a/docs/integrations/sources/hubspot.md +++ b/docs/integrations/sources/hubspot.md @@ -18,21 +18,21 @@ Check out common troubleshooting issues for the HubSpot connector on our Discour This source is capable of syncing the following tables and their data: * [Campaigns](https://developers.hubspot.com/docs/methods/email/get_campaign_data) -* [Companies](https://developers.hubspot.com/docs/api/crm/companies) -* [Contact Lists](http://developers.hubspot.com/docs/methods/lists/get_lists) -* [Contacts](https://developers.hubspot.com/docs/methods/contacts/get_contacts) +* [Companies](https://developers.hubspot.com/docs/api/crm/companies) \(Incremental\) +* [Contact Lists](http://developers.hubspot.com/docs/methods/lists/get_lists) \(Incremental\) +* [Contacts](https://developers.hubspot.com/docs/methods/contacts/get_contacts) \(Incremental\) * [Deal Pipelines](https://developers.hubspot.com/docs/methods/pipelines/get_pipelines_for_object_type) -* [Deals](https://developers.hubspot.com/docs/api/crm/deals) \(including Contact associations\) +* [Deals](https://developers.hubspot.com/docs/api/crm/deals) \(including Contact associations\) \(Incremental\) * [Email Events](https://developers.hubspot.com/docs/methods/email/get_events) \(Incremental\) * [Engagements](https://legacydocs.hubspot.com/docs/methods/engagements/get-all-engagements) * [Forms](https://developers.hubspot.com/docs/api/marketing/forms) -* [Line Items](https://developers.hubspot.com/docs/api/crm/line-items) +* [Line Items](https://developers.hubspot.com/docs/api/crm/line-items) \(Incremental\) * [Marketing Emails](https://legacydocs.hubspot.com/docs/methods/cms_email/get-all-marketing-email-statistics) * [Owners](https://developers.hubspot.com/docs/methods/owners/get_owners) -* [Products](https://developers.hubspot.com/docs/api/crm/products) -* [Quotes](https://developers.hubspot.com/docs/api/crm/quotes) +* [Products](https://developers.hubspot.com/docs/api/crm/products) \(Incremental\) +* [Quotes](https://developers.hubspot.com/docs/api/crm/quotes) \(Incremental\) * [Subscription Changes](https://developers.hubspot.com/docs/methods/email/get_subscriptions_timeline) \(Incremental\) -* 
[Tickets](https://developers.hubspot.com/docs/api/crm/tickets) +* [Tickets](https://developers.hubspot.com/docs/api/crm/tickets) \(Incremental\) * [Workflows](https://legacydocs.hubspot.com/docs/methods/workflows/v3/get_workflows) **Note**: HubSpot API currently only supports `quotes` endpoint using API Key, using Oauth it is impossible to access this stream (as reported by [community.hubspot.com](https://community.hubspot.com/t5/APIs-Integrations/Help-with-using-Feedback-CRM-API-and-Quotes-CRM-API/m-p/449104/highlight/true#M44411)). @@ -96,9 +96,12 @@ If you are using Oauth, most of the streams require the appropriate [scopes](htt | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | -| 0.1.26 | 2021-11-30 | [8329](https://github.com/airbytehq/airbyte/pull/8329) | removed 'skip_dynamic_fields' config param | -| 0.1.25 | 2021-11-23 | [8216](https://github.com/airbytehq/airbyte/pull/8216) | skip dynamic fields for testing only | -| 0.1.24 | 2021-11-09 | [7683](https://github.com/airbytehq/airbyte/pull/7683) | bugfix 'Hubspot' -> 'HubSpot' | +| 0.1.29 | 2021-12-17 | [8699](https://github.com/airbytehq/airbyte/pull/8699) | Add incremental sync support for `companies`, `contact_lists`, `contacts`, `deals`, `line_items`, `products`, `quotes`, `tickets` streams | +| 0.1.28 | 2021-12-15 | [8429](https://github.com/airbytehq/airbyte/pull/8429) | Update fields and descriptions | +| 0.1.27 | 2021-12-09 | [8658](https://github.com/airbytehq/airbyte/pull/8658) | Fixed config backward compatibility issue by allowing additional properties in the spec | +| 0.1.26 | 2021-11-30 | [8329](https://github.com/airbytehq/airbyte/pull/8329) | Removed 'skip_dynamic_fields' config param | +| 0.1.25 | 2021-11-23 | [8216](https://github.com/airbytehq/airbyte/pull/8216) | Add skip dynamic fields for testing only | +| 0.1.24 | 2021-11-09 | [7683](https://github.com/airbytehq/airbyte/pull/7683) | Fix name issue 'Hubspot' -> 'HubSpot' | | 0.1.23 | 2021-11-08 | [7730](https://github.com/airbytehq/airbyte/pull/7730) | Fix oAuth flow schema| | 0.1.22 | 2021-11-03 | [7562](https://github.com/airbytehq/airbyte/pull/7562) | Migrate Hubspot source to CDK structure | | 0.1.21 | 2021-10-27 | [7405](https://github.com/airbytehq/airbyte/pull/7405) | Change of package `import` from `urllib` to `urllib.parse` | diff --git a/docs/integrations/sources/intercom.md b/docs/integrations/sources/intercom.md index 103da21f7cd2..0d01b412efdd 100644 --- a/docs/integrations/sources/intercom.md +++ b/docs/integrations/sources/intercom.md @@ -55,6 +55,7 @@ Please read [How to get your Access Token](https://developers.intercom.com/build | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.12 | 2021-12-14 | [8429](https://github.com/airbytehq/airbyte/pull/8429) | Updated fields and descriptions | | 0.1.11 | 2021-12-13 | [8685](https://github.com/airbytehq/airbyte/pull/8685) | Remove time.sleep for rate limit | | 0.1.10 | 2021-12-10 | [8637](https://github.com/airbytehq/airbyte/pull/8637) | Fix 'conversations' order and sorting. 
Correction of the companies stream| | 0.1.9 | 2021-12-03 | [8395](https://github.com/airbytehq/airbyte/pull/8395) | Fix backoff of 'companies' stream | diff --git a/docs/integrations/sources/linnworks.md b/docs/integrations/sources/linnworks.md index d62a7fa0a906..91423978e3f8 100644 --- a/docs/integrations/sources/linnworks.md +++ b/docs/integrations/sources/linnworks.md @@ -54,7 +54,8 @@ Authentication credentials can be obtained on developer portal section Applicati | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | -| 0.2.0 | 2021-11-24 | [8169](https://github.com/airbytehq/airbyte/pull/8169) | Source Linnworks: refactor stream StockLocations | +| 0.1.4 | 2021-11-24 | [8226](https://github.com/airbytehq/airbyte/pull/8226) | Source Linnworks: improve streams ProcessedOrders and ProcessedOrderDetails | +| 0.1.3 | 2021-11-24 | [8169](https://github.com/airbytehq/airbyte/pull/8169) | Source Linnworks: refactor stream StockLocations | | 0.1.2 | 2021-11-23 | [8177](https://github.com/airbytehq/airbyte/pull/8177) | Source Linnworks: add stream ProcessedOrderDetails | | 0.1.0 | 2021-11-09 | [7588](https://github.com/airbytehq/airbyte/pull/7588) | New Source: Linnworks | diff --git a/docs/integrations/sources/microsoft-teams.md b/docs/integrations/sources/microsoft-teams.md index 77987dc8fd0a..1adde3295778 100644 --- a/docs/integrations/sources/microsoft-teams.md +++ b/docs/integrations/sources/microsoft-teams.md @@ -159,5 +159,6 @@ Token acquiring implemented by [instantiate](https://docs.microsoft.com/en-us/az | Version | Date | Pull Request | Subject | |:--------|:-----------| :--- | :--- | +| 0.2.5 | 2021-12-14 | [8429](https://github.com/airbytehq/airbyte/pull/8429) | Update titles and descriptions | | 0.2.4 | 2021-12-07 | [7807](https://github.com/airbytehq/airbyte/pull/7807) | Implement OAuth support | | 0.2.3 | 2021-12-06 | [8469](https://github.com/airbytehq/airbyte/pull/8469) | Migrate to the CDK | diff --git a/docs/integrations/sources/monday.md b/docs/integrations/sources/monday.md index 4586efe2f0c1..5ee2149f258f 100644 --- a/docs/integrations/sources/monday.md +++ b/docs/integrations/sources/monday.md @@ -44,5 +44,6 @@ You can get the API key for Monday by going to Profile picture (bottom left) => | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.2 | 2021-12-07 | [8429](https://github.com/airbytehq/airbyte/pull/8429) | Update titles and descriptions | | 0.1.1 | 2021-11-18 | [8016](https://github.com/airbytehq/airbyte/pull/8016) | 🐛 Source Monday: fix pagination and schema bug | | 0.1.0 | 2021-11-07 | [7168](https://github.com/airbytehq/airbyte/pull/7168) | 🎉 New Source: Monday | diff --git a/docs/integrations/sources/openweather.md b/docs/integrations/sources/openweather.md new file mode 100644 index 000000000000..dba899a2e39d --- /dev/null +++ b/docs/integrations/sources/openweather.md @@ -0,0 +1,38 @@ +# OpenWeather + +## Overview + +This source connector syncs data from the [OpenWeather One Call API](https://openweathermap.org/api/one-call-api). This API allows you to obtain current and forecast weather data for a geolocation expressed in latitude and longitude. + +### Output schema + +This source currently has a single stream, `openweather_one_call`. An example of the data output by this stream is available [here](https://openweathermap.org/api/one-call-api#example). + +### Features + +| Feature | Supported?
| +| :--- | :--- | +| Full Refresh Sync - (append only) | Yes | +| Incremental - Append Sync | Yes | +| Namespaces | No | + +## Getting started + +### Requirements + +* An OpenWeather API key +* Latitude and longitude of the location for which you want to get weather data + +### Setup guide + +Visit the [OpenWeather](https://openweathermap.org) to create a user account and obtain an API key. The *One Call API* is available with the free plan. + +## Rate limiting +The free plan allows 60 calls per minute and 1,000,000 calls per month, you won't get beyond these limits with existing Airbyte's sync frequencies. + +## Changelog + +| Version | Date | Pull Request | Subject | +| :--- | :--- | :--- | :--- | +| 0.1.0 | 2021-10-27 | [7434](https://github.com/airbytehq/airbyte/pull/7434) | Initial release | + diff --git a/docs/integrations/sources/paypal-transaction.md b/docs/integrations/sources/paypal-transaction.md index f9023b31105b..c21d6cdaaaa4 100644 --- a/docs/integrations/sources/paypal-transaction.md +++ b/docs/integrations/sources/paypal-transaction.md @@ -57,6 +57,7 @@ Transactions sync is performed with default `stream_slice_period` = 1 day, it me | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.3 | 2021-12-16 | [8580](https://github.com/airbytehq/airbyte/pull/8580) | Added more logs during `check connection` stage | | 0.1.2 | 2021-11-08 | [7499](https://github.com/airbytehq/airbyte/pull/7499) | Remove base-python dependencies | | 0.1.1 | 2021-08-03 | [5155](https://github.com/airbytehq/airbyte/pull/5155) | fix start\_date\_min limit | | 0.1.0 | 2021-06-10 | [4240](https://github.com/airbytehq/airbyte/pull/4240) | PayPal Transaction Search API | diff --git a/docs/integrations/sources/shopify.md b/docs/integrations/sources/shopify.md index 0dc8bc745e12..e86be77f4995 100644 --- a/docs/integrations/sources/shopify.md +++ b/docs/integrations/sources/shopify.md @@ -101,6 +101,7 @@ This connector support both: `OAuth 2.0` and `API PASSWORD` (for private applica | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.26 | 2021-12-14 | [8597](https://github.com/airbytehq/airbyte/pull/8597) | Fix `mismatched number of tables` for base-normalization, increased performance of `order_refunds` stream | | 0.1.25 | 2021-12-02 | [8297](https://github.com/airbytehq/airbyte/pull/8297) | Added Shop stream | | 0.1.24 | 2021-11-30 | [7783](https://github.com/airbytehq/airbyte/pull/7783) | Reviewed and corrected schemas for all streams | | 0.1.23 | 2021-11-15 | [7973](https://github.com/airbytehq/airbyte/pull/7973) | Added `InventoryItems` | diff --git a/docs/integrations/sources/snapchat-marketing.md b/docs/integrations/sources/snapchat-marketing.md index 6d7a99999f4d..3ecd3d02b79d 100644 --- a/docs/integrations/sources/snapchat-marketing.md +++ b/docs/integrations/sources/snapchat-marketing.md @@ -89,6 +89,7 @@ Snapchat Marketing API has limitations to 1000 items per page | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.4 | 2021-12-07 | [8429](https://github.com/airbytehq/airbyte/pull/8429) | Update titles and descriptions | | 0.1.3 | 2021-11-10 | [7811](https://github.com/airbytehq/airbyte/pull/7811) | Add oauth2.0, fix stream_state | | 0.1.2 | 2021-11-08 | [7499](https://github.com/airbytehq/airbyte/pull/7499) | Remove base-python dependencies | | 0.1.1 | 2021-07-29 | [5072](https://github.com/airbytehq/airbyte/pull/5072) | Fix bug with incorrect stream\_state value | diff --git a/docs/integrations/sources/strava.md 
b/docs/integrations/sources/strava.md index 2adc5904ecec..ad7fbd3c9dd4 100644 --- a/docs/integrations/sources/strava.md +++ b/docs/integrations/sources/strava.md @@ -89,6 +89,7 @@ More information about Strava rate limits and adjustments to those limits can be | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.2 | 2021-12-15 | [8799](https://github.com/airbytehq/airbyte/pull/8799) | Implement OAuth 2.0 support | | 0.1.1 | 2021-12-06 | [8425](https://github.com/airbytehq/airbyte/pull/8425) | Update title, description fields in spec | | 0.1.0 | 2021-10-18 | [7151](https://github.com/airbytehq/airbyte/pull/7151) | Initial release supporting Strava API | diff --git a/docs/integrations/sources/zendesk-support.md b/docs/integrations/sources/zendesk-support.md index ccc2d1cc48dc..9764a4f45414 100644 --- a/docs/integrations/sources/zendesk-support.md +++ b/docs/integrations/sources/zendesk-support.md @@ -97,6 +97,7 @@ We recommend creating a restricted, read-only key specifically for Airbyte acces | Version | Date | Pull Request | Subject | | :------ | :-------- | :----- | :------ | +| `0.1.9` | 2021-12-16 | [8616](https://github.com/airbytehq/airbyte/pull/8616) | Adds Brands, CustomRoles and Schedules | | `0.1.8` | 2021-11-23 | [8050](https://github.com/airbytehq/airbyte/pull/8168) | Adds TicketMetricEvents | | `0.1.7` | 2021-11-23 | [8058](https://github.com/airbytehq/airbyte/pull/8058) | support AccessToken auth | | `0.1.6` | 2021-11-18 | [8050](https://github.com/airbytehq/airbyte/pull/8050) | Fix wrong types for schemas, add Transformer | diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index 6dbea8d6c847..87b4bc089357 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -99,7 +99,7 @@ If you are upgrading from \(i.e. your current version of Airbyte is\) Airbyte ve Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. ```bash - docker run --rm -v /tmp:/config airbyte/migration:0.33.12-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.34.1-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/docs/operator-guides/using-prefect-task.md b/docs/operator-guides/using-prefect-task.md index 83d4590380e3..3147fa5f6242 100644 --- a/docs/operator-guides/using-prefect-task.md +++ b/docs/operator-guides/using-prefect-task.md @@ -38,7 +38,7 @@ We'll need the Airbyte Connection ID so our Prefect Flow knows which Airbyte Con This ID can be seen in the URL on the connection page in the Airbyte UI. The Airbyte UI can be accessed at `localhost:8000`. -### Creating a simple Airflow DAG to run an Airbyte Sync Job +### Creating a simple Prefect DAG to run an Airbyte Sync Job Create a new folder called `airbyte_prefect` and create a file `airbyte_prefect_flow.py`. diff --git a/docs/understanding-airbyte/connections/README.md b/docs/understanding-airbyte/connections/README.md index 5c8f3f84364c..38da0d4bfd0f 100644 --- a/docs/understanding-airbyte/connections/README.md +++ b/docs/understanding-airbyte/connections/README.md @@ -45,14 +45,14 @@ All the customization of namespace and stream names described above will be equa A sync mode governs how Airbyte reads from a source and writes to a destination. Airbyte provides different sync modes to account for various use cases. 
To minimize confusion, a mode's behavior is reflected in its name. The easiest way to understand Airbyte's sync modes is to understand how the modes are named. 1. The first part of the name denotes how the source connector reads data from the source: -2. Incremental: Read records added to the source since the last sync job. \(The first sync using Incremental is equivalent to a Full Refresh\) - * Method 1: Using a cursor. Generally supported by all connectors whose data source allows extracting records incrementally. - * Method 2: Using change data capture. Only supported by some sources. See [CDC](../cdc.md) for more info. -3. Full Refresh: Read everything in the source. -4. The second part of the sync mode name denotes how the destination connector writes data. This is not affected by how the source connector produced the data: -5. Overwrite: Overwrite by first deleting existing data in the destination. -6. Append: Write by adding data to existing tables in the destination. -7. Deduped History: Write by first adding data to existing tables in the destination to keep a history of changes. The final table is produced by de-duplicating the intermediate ones using a primary key. + 1. Incremental: Read records added to the source since the last sync job. \(The first sync using Incremental is equivalent to a Full Refresh\) + * Method 1: Using a cursor. Generally supported by all connectors whose data source allows extracting records incrementally. + * Method 2: Using change data capture. Only supported by some sources. See [CDC](../cdc.md) for more info. + 2. Full Refresh: Read everything in the source. +2. The second part of the sync mode name denotes how the destination connector writes data. This is not affected by how the source connector produced the data: + 1. Overwrite: Overwrite by first deleting existing data in the destination. + 2. Append: Write by adding data to existing tables in the destination. + 3. Deduped History: Write by first adding data to existing tables in the destination to keep a history of changes. The final table is produced by de-duplicating the intermediate ones using a primary key. A sync mode is, therefore, a combination of a source mode and a destination mode. The UI exposes the following options, whenever both source and destination connectors are capable of supporting it for the corresponding stream: diff --git a/docs/understanding-airbyte/json-avro-conversion.md b/docs/understanding-airbyte/json-avro-conversion.md index a2fa6e730ccd..306c8b4a59b2 100644 --- a/docs/understanding-airbyte/json-avro-conversion.md +++ b/docs/understanding-airbyte/json-avro-conversion.md @@ -2,42 +2,53 @@ When an Airbyte data stream is synced to the Avro or Parquet format (e.g. Parquet on S3), the source Json schema is converted to an Avro schema, then the Json object is converted to an Avro record based on the Avro schema (and further to Parquet if necessary). Because the data stream can come from any data source, the Json to Avro conversion process has the following rules and limitations. -1. Json schema types are mapped to Avro types as follows: - - | Json Data Type | Avro Data Type | - | :---: | :---: | - | string | string | - | number | double | - | integer | int | - | boolean | boolean | - | null | null | - | object | record | - | array | array | - -2.
Built-in Json schema date-time formats will be mapped to Avro logical types - +## Conversion Rules + +### Type Mapping + +Json schema types are mapped to Avro types as follows: + +| Json Data Type | Avro Data Type | +| :---: | :---: | +| string | string | +| number | double | +| integer | int | +| boolean | boolean | +| null | null | +| object | record | +| array | array | + +### Built-in Formats + +The following built-in Json formats will be mapped to Avro logical types. + +| Json Type | Json Built-in Format | Avro Type | Avro Logical Type | Meaning | +| --- | --- | --- | --- | --- | +| `string` | `date` | `int` | `date` | Number of epoch days from 1970-01-01 ([reference](https://avro.apache.org/docs/current/spec.html#Date)). | +| `string` | `time` | `long` | `time-micros` | Number of microseconds after midnight ([reference](https://avro.apache.org/docs/current/spec.html#Time+%28microsecond+precision%29)). | +| `string` | `date-time` | `long` | `timestamp-micros` | Number of microseconds from `1970-01-01T00:00:00Z` ([reference](https://avro.apache.org/docs/current/spec.html#Timestamp+%28microsecond+precision%29)). | + **Date** The date logical type represents a date within the calendar, with no reference to a particular time zone or time of day. A date logical type annotates an Avro int, where the int stores the number of days from the unix epoch, 1 January 1970 (ISO calendar). +```json +{ + "type": "string", + "format": "date" +} +``` - ```json - { - "type": "string", - "format": "date" - } - ``` - - will become in Avro schema: +will become in Avro schema: - ```json - { - "type": "int", - "logicalType": "date" - } - ``` +```json +{ + "type": "int", + "logicalType": "date" +} +``` **Time (microsecond precision)** @@ -45,22 +56,21 @@ The time-micros logical type represents a time of day, with no reference to a pa A time-micros logical type annotates an Avro long, where the long stores the number of microseconds after midnight, 00:00:00.000000. - - ```json - { - "type": "string", - "format": "time" - } - ``` +```json +{ + "type": "string", + "format": "time" +} +``` will become in Avro schema: - ```json - { - "type": "long", - "logicalType": "time-micros" - } - ``` +```json +{ + "type": "long", + "logicalType": "time-micros" +} +``` **Timestamp (microsecond precision)** @@ -68,90 +78,159 @@ The timestamp-micros logical type represents an instant on the global timeline, A timestamp-micros logical type annotates an Avro long, where the long stores the number of microseconds from the unix epoch, 1 January 1970 00:00:00.000000 UTC. - - ```json - { - "type": "string", - "format": "date-time" - } - ``` +```json +{ + "type": "string", + "format": "date-time" +} +``` will become in Avro schema: - ```json - { - "type": "long", - "logicalType": "timestamp-micros" - } - ``` +```json +{ + "type": "long", + "logicalType": "timestamp-micros" +} +``` + +### Combined Restrictions + +Combined restrictions \(`allOf`, `anyOf`, and `oneOf`\) will be converted to type unions. The corresponding Avro schema can be less stringent. For example, the following Json schema + +```json +{ + "oneOf": [ + {"type": "string"}, + {"type": "integer"} + ] +} +``` + +will become this in Avro schema: + +```json +{ + "type": ["null", "string", "int"] +} +``` + +### Keyword `not` + +Keyword `not` is not supported, as there is no equivalent validation mechanism in Avro schema. + +### Field Name + +Only alphanumeric characters and underscores \(`/a-zA-Z0-9_/`\) are allowed in a stream or field name.
Any special character will be converted to a letter or underscore. For example, `spécial:character_names` will become `special_character_names`. The original names will be stored in the `doc` property in this format: `_airbyte_original_name:`. + +A field name cannot start with a number, so an underscore will be prepended to such field names. -3. Combined restrictions \("allOf", "anyOf", and "oneOf"\) will be converted to type unions. The corresponding Avro schema can be less stringent. For example, the following Json schema +### Nullable Fields - ```json - { - "oneOf": [ - { "type": "string" }, - { "type": "integer" } +All fields will be nullable. For example, a `string` Json field will be typed as `["null", "string"]` in Avro. This is necessary because the incoming data stream may have optional fields. + +### Array Types + +For array fields in Json schema, when the `items` property is an array, it means that each element in the array should follow its own schema sequentially. For example, the following specification means the first item in the array should be a string, and the second a number. + +```json +{ + "array_field": { + "type": "array", + "items": [ + {"type": "string"}, + {"type": "number"} ] - } - ``` - - will become this in Avro schema: - - ```json - { - "type": ["null", "string", "int"] - } - ``` - -4. Keyword `not` is not supported, as there is no equivalent validation mechanism in Avro schema. -5. Only alphanumeric characters and underscores \(`/a-zA-Z0-9_/`\) are allowed in a stream or field name. Any special character will be converted to an alphabet or underscore. For example, `spécial:character_names` will become `special_character_names`. The original names will be stored in the `doc` property in this format: `_airbyte_original_name:`. -6. The field name cannot start with a number, so an underscore will be added to the field name at the beginning. -7. All field will be nullable. For example, a `string` Json field will be typed as `["null", "string"]` in Avro. This is necessary because the incoming data stream may have optional fields. -8. For array fields in Json schema, when the `items` property is an array, it means that each element in the array should follow its own schema sequentially. For example, the following specification means the first item in the array should be a string, and the second a number. - - ```json - { - "array_field": { + } +} +``` + +This is not supported in Avro schema. As a compromise, the converter creates a union, `["null", "string", "number"]`, which is less stringent: + +```json +{ + "name": "array_field", + "type": [ + "null", + { "type": "array", - "items": [ - { "type": "string" }, - { "type": "number" } - ] + "items": ["null", "string", "number"] } - } - ``` - - This is not supported in Avro schema. As a compromise, the converter creates a union, \["string", "number"\], which is less stringent: - - ```json - { - "name": "array_field", - "type": [ - "null", - { - "type": "array", - "items": ["null", "string"] - } - ], - "default": null - } - ``` + ], + "default": null +} +``` -9.
Three Airbyte specific fields will be added to each Avro record: - - | Field | Schema | Document | - | :--- | :--- | :---: | - | `_airbyte_ab_id` | `uuid` | [link](http://avro.apache.org/docs/current/spec.html#UUID) | - | `_airbyte_emitted_at` | `timestamp-millis` | [link](http://avro.apache.org/docs/current/spec.html#Timestamp+%28millisecond+precision%29) | - | `_airbyte_additional_properties` | `map` of `string` | See explanation below. | +### Untyped Array -10. A Json object can have additional properties of unknown types, which is not compatible with the Avro schema. To solve this problem during Json to Avro object conversion, we introduce a special field: `_airbyte_additional_properties` typed as a nullable `map` from `string` to `string`: +When a Json array field has no `items`, the element in that array field may have any type. However, Avro requires that each array has a clear type specification. To solve this problem, the elements in the array are forced to be `string`s. + +For example, given the following Json schema and object: + +```json +{ + "type": "object", + "properties": { + "identifier": { + "type": "array" + } + } +} +``` + +```json +{ + "identifier": ["151", 152, true, {"id": 153}, null] +} +``` + +the corresponding Avro schema and object will be: + +```json +{ + "type": "record", + "fields": [ + { + "name": "identifier", + "type": [ + "null", + { + "type": "array", + "items": ["null", "string"] + } + ], + "default": null + } + ] +} +``` + +```json +{ + "identifier": ["151", "152", "true", "{\"id\": 153}", null] +} +``` + +Note that every non-null element inside the `identifier` array field is converted to a string. + +### Airbyte-Specific Fields + +Three Airbyte-specific fields will be added to each Avro record: + +| Field | Schema | Document | +| :--- | :--- | :---: | +| `_airbyte_ab_id` | `uuid` | [link](http://avro.apache.org/docs/current/spec.html#UUID) | +| `_airbyte_emitted_at` | `timestamp-millis` | [link](http://avro.apache.org/docs/current/spec.html#Timestamp+%28millisecond+precision%29) | +| `_airbyte_additional_properties` | `map` of `string` | See explanation below. | + +### Additional Properties + +A Json object can have additional properties of unknown types, which is not compatible with the Avro schema. To solve this problem during Json to Avro object conversion, we introduce a special field: `_airbyte_additional_properties` typed as a nullable `map` from `string` to `string`: ```json { "name": "_airbyte_additional_properties", - "type": ["null", { "type": "map", "values": "string" }], + "type": ["null", {"type": "map", "values": "string"}], "default": null } ``` @@ -161,7 +240,6 @@ For example, given the following Json schema: ```json { "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", "properties": { "username": { "type": ["null", "string"] @@ -201,7 +279,55 @@ will be converted to the following Avro object: Note that all fields other than `username` are moved under `_airbyte_additional_properties` as serialized strings, including the original object `auth`. -11. Based on the above rules, here is an overall example. Given the following Json schema: +### Untyped Object + +If an `object` field has no `properties` specification, all fields within this `object` will be put into the aforementioned `_airbyte_additional_properties` field.
+ +For example, given the following Json schema and object: + +```json +{ + "type": "object" +} +``` + +```json +{ + "username": "343-guilty-spark", + "password": 1439, + "active": true +} +``` + +the corresponding Avro schema and record will be: + +```json +{ + "type": "record", + "name": "record_without_properties", + "fields": [ + { + "name": "_airbyte_additional_properties", + "type": ["null", {"type": "map", "values": "string"}], + "default": null + } + ] +} +``` + +```json +{ + "_airbyte_additional_properties": { + "username": "343-guilty-spark", + "password": "1439", + "active": "true" + } +} +``` + +## Example + +Based on the above rules, here is an overall example. Given the following Json schema: ```json { @@ -277,7 +403,7 @@ Its corresponding Avro schema will be: }, { "name": "_airbyte_additional_properties", - "type": ["null", { "type": "map", "values": "string" }], + "type": ["null", {"type": "map", "values": "string"}], "default": null } ] @@ -287,15 +413,24 @@ Its corresponding Avro schema will be: }, { "name": "created_at", - "type": ["null", "string"], + "type": [ + "null", + {"type": "long", "logicalType": "timestamp-micros"}, + "string" + ], "default": null }, { "name": "_airbyte_additional_properties", - "type": ["null", { "type": "map", "values": "string" }], + "type": ["null", {"type": "map", "values": "string"}], "default": null } ] } - ``` + +More examples can be found in the Json to Avro conversion [test cases](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-s3/src/test/resources/parquet/json_schema_converter/json_conversion_test_cases.json). + +## Implementation +- Schema conversion: [JsonToAvroSchemaConverter](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/JsonToAvroSchemaConverter.java) +- Object conversion: [airbytehq/json-avro-converter](https://github.com/airbytehq/json-avro-converter) (forked and modified from [allegro/json-avro-converter](https://github.com/allegro/json-avro-converter)). 
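To make the additional-properties rule above concrete in code, here is a minimal, self-contained sketch using Jackson. It is not the converter's actual implementation (see the links above for that), and the known-fields set is an assumption standing in for the declared schema:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

public class AdditionalPropertiesSketch {

  // Fields declared in the schema; anything else is treated as "additional".
  private static final Set<String> KNOWN_FIELDS = Set.of("username");

  public static void main(String[] args) throws Exception {
    final ObjectMapper mapper = new ObjectMapper();
    final JsonNode record = mapper.readTree(
        "{\"username\":\"343-guilty-spark\",\"password\":1439,\"active\":true}");

    final ObjectNode result = mapper.createObjectNode();
    final Map<String, String> additional = new HashMap<>();

    final Iterator<Map.Entry<String, JsonNode>> fields = record.fields();
    while (fields.hasNext()) {
      final Map.Entry<String, JsonNode> field = fields.next();
      if (KNOWN_FIELDS.contains(field.getKey())) {
        result.set(field.getKey(), field.getValue());
      } else {
        // Unknown fields are serialized to strings, mirroring the nullable map<string, string> type.
        additional.put(field.getKey(), field.getValue().isTextual()
            ? field.getValue().asText()
            : mapper.writeValueAsString(field.getValue()));
      }
    }
    result.set("_airbyte_additional_properties", mapper.valueToTree(additional));

    // e.g. {"username":"343-guilty-spark","_airbyte_additional_properties":{"password":"1439","active":"true"}}
    System.out.println(mapper.writeValueAsString(result));
  }
}
```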
diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index 71c87d8bb407..39f9db5a1f3f 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.33.12-alpha +AIRBYTE_VERSION=0.34.1-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index 89e09a457923..582a9ea6219d 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.33.12-alpha + newTag: 0.34.1-alpha - name: airbyte/bootloader - newTag: 0.33.12-alpha + newTag: 0.34.1-alpha - name: airbyte/scheduler - newTag: 0.33.12-alpha + newTag: 0.34.1-alpha - name: airbyte/server - newTag: 0.33.12-alpha + newTag: 0.34.1-alpha - name: airbyte/webapp - newTag: 0.33.12-alpha + newTag: 0.34.1-alpha - name: airbyte/worker - newTag: 0.33.12-alpha + newTag: 0.34.1-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index 71c87d8bb407..39f9db5a1f3f 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.33.12-alpha +AIRBYTE_VERSION=0.34.1-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index 941eef696bd0..eb29aacd01c1 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.33.12-alpha + newTag: 0.34.1-alpha - name: airbyte/bootloader - newTag: 0.33.12-alpha + newTag: 0.34.1-alpha - name: airbyte/scheduler - newTag: 0.33.12-alpha + newTag: 0.34.1-alpha - name: airbyte/server - newTag: 0.33.12-alpha + newTag: 0.34.1-alpha - name: airbyte/webapp - newTag: 0.33.12-alpha + newTag: 0.34.1-alpha - name: airbyte/worker - newTag: 0.33.12-alpha + newTag: 0.34.1-alpha - name: temporalio/auto-setup newTag: 1.7.0 @@ -29,4 +29,5 @@ configMapGenerator: secretGenerator: - name: airbyte-secrets - env: .secrets + envs: + - .secrets diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh new file mode 100755 index 000000000000..e69de29bb2d1 diff --git a/tools/bin/ci_performance_test.sh b/tools/bin/ci_performance_test.sh index 85a879017305..3caa5154ad40 100755 --- a/tools/bin/ci_performance_test.sh +++ b/tools/bin/ci_performance_test.sh @@ -7,8 +7,6 @@ set -e # runs performance tests for an performance name connector="$1" -firstarg="" -secondarg="" if [[ "$2" ]]; then if [[ "$2" == *"cpulimit"* ]]; then firstarg="-DcpuLimit=$(echo $2 | cut -d / -f 2)" @@ -46,14 +44,24 @@ else elif [[ "$connector" == *"connectors"* ]]; then connector_name=$(echo $connector | cut -d / -f 2) selected_performance_test=$(echo "$all_performance_tests" | grep "^$connector_name$" || echo "") - performanceTestCommand="$(_to_gradle_path "airbyte-integrations/$connector" performanceTest) $firstarg $secondargt" + performanceTestCommand="$(_to_gradle_path "airbyte-integrations/$connector" performanceTest)" else selected_performance_test=$(echo "$all_performance_tests" | grep "^$connector$" || echo 
"") - performanceTestCommand=":airbyte-integrations:connectors:$connector:performanceTest $firstarg $secondarg" + performanceTestCommand=":airbyte-integrations:connectors:$connector:performanceTest" fi if [ -n "$selected_performance_test" ] ; then - echo "Running: ./gradlew --no-daemon --scan $performanceTestCommand" - ./gradlew --no-daemon --scan "$performanceTestCommand" + if [[ "$firstarg" ]]; then + if [[ "$secondarg" ]]; then + echo "Running: ./gradlew --no-daemon --scan $performanceTestCommand $firstarg $secondarg" + ./gradlew --no-daemon --scan "$performanceTestCommand" "$firstarg" "$secondarg" + else + echo "Running: ./gradlew --no-daemon --scan $performanceTestCommand $firstarg" + ./gradlew --no-daemon --scan "$performanceTestCommand" "$firstarg" + fi + else + echo "Running: ./gradlew --no-daemon --scan $performanceTestCommand" + ./gradlew --no-daemon --scan "$performanceTestCommand" + fi else echo "Connector '$connector' not found..." return 1 diff --git a/tools/bin/release_version.sh b/tools/bin/release_version.sh index 37c6262ea82b..e2e246aa2842 100755 --- a/tools/bin/release_version.sh +++ b/tools/bin/release_version.sh @@ -21,7 +21,7 @@ fi docker login -u airbytebot -p "${DOCKER_PASSWORD}" -PREV_VERSION=$(grep VERSION .env | cut -d"=" -f2) +PREV_VERSION=$(grep -w VERSION .env | cut -d"=" -f2) [[ -z "$PART_TO_BUMP" ]] && echo "Usage ./tools/bin/release_version.sh (major|minor|patch)" && exit 1 @@ -31,7 +31,7 @@ PREV_VERSION=$(grep VERSION .env | cut -d"=" -f2) pip install bumpversion bumpversion "$PART_TO_BUMP" -NEW_VERSION=$(grep VERSION .env | cut -d"=" -f2) +NEW_VERSION=$(grep -w VERSION .env | cut -d"=" -f2) GIT_REVISION=$(git rev-parse HEAD) [[ -z "$GIT_REVISION" ]] && echo "Couldn't get the git revision..." && exit 1 diff --git a/tools/bin/tag_version.sh b/tools/bin/tag_version.sh index b83002feef8d..75fdf9bcd190 100755 --- a/tools/bin/tag_version.sh +++ b/tools/bin/tag_version.sh @@ -15,7 +15,7 @@ fi # make sure your master branch is up to date git pull --rebase -VERSION=$(cat .env | grep VERSION= | cut -d= -f 2) +VERSION=$(cat .env | grep -w VERSION | cut -d= -f 2) [[ -z "$VERSION" ]] && echo "Couldn't find version in env file..." 
&& exit 1 TAG_NAME="v$VERSION" From f2a71246d932ff55dfd4560c8c848756162e9d5b Mon Sep 17 00:00:00 2001 From: Marcos Marx Date: Fri, 17 Dec 2021 19:20:15 -0300 Subject: [PATCH 12/12] resolve webapp files --- .../views/layout/MainView/ErrorBoundary.tsx | 44 ------------------- .../cloud/views/layout/MainView/MainView.tsx | 2 +- .../src/views/layout/MainView/MainView.tsx | 2 +- 3 files changed, 2 insertions(+), 46 deletions(-) delete mode 100644 airbyte-webapp/src/packages/cloud/views/layout/MainView/ErrorBoundary.tsx diff --git a/airbyte-webapp/src/packages/cloud/views/layout/MainView/ErrorBoundary.tsx b/airbyte-webapp/src/packages/cloud/views/layout/MainView/ErrorBoundary.tsx deleted file mode 100644 index 1fdd8eb3f5da..000000000000 --- a/airbyte-webapp/src/packages/cloud/views/layout/MainView/ErrorBoundary.tsx +++ /dev/null @@ -1,44 +0,0 @@ -import React from "react"; -import { FormattedMessage } from "react-intl"; - -import { CommonRequestError } from "core/request/CommonRequestError"; - -type BoundaryState = { hasError: boolean; message?: React.ReactNode | null }; - -const initialState: BoundaryState = { - hasError: false, - message: null, -}; - -export class ErrorBoundary extends React.Component< - { errorComponent: React.ReactElement }, - BoundaryState -> { - static getDerivedStateFromError(error: CommonRequestError): BoundaryState { - if (error.message.startsWith("Insufficient permissions")) { - return { hasError: true, message: error.message }; - } else if (error.status === 422) { - return { - hasError: true, - message: , - }; - } else { - throw error; - } - } - - state = initialState; - - reset = (): void => { - this.setState(initialState); - }; - - render(): React.ReactNode { - return this.state.hasError - ? React.cloneElement(this.props.errorComponent, { - message: this.state.message, - onReset: this.reset, - }) - : this.props.children; - } -} diff --git a/airbyte-webapp/src/packages/cloud/views/layout/MainView/MainView.tsx b/airbyte-webapp/src/packages/cloud/views/layout/MainView/MainView.tsx index e159c47ab07e..e1e5c1375251 100644 --- a/airbyte-webapp/src/packages/cloud/views/layout/MainView/MainView.tsx +++ b/airbyte-webapp/src/packages/cloud/views/layout/MainView/MainView.tsx @@ -24,7 +24,7 @@ const Content = styled.div` height: 100%; `; -const MainView: React.FC = ({ children }) => ( +const MainView: React.FC = (props) => ( } diff --git a/airbyte-webapp/src/views/layout/MainView/MainView.tsx b/airbyte-webapp/src/views/layout/MainView/MainView.tsx index 70b737b11cf9..67ebd9b33969 100644 --- a/airbyte-webapp/src/views/layout/MainView/MainView.tsx +++ b/airbyte-webapp/src/views/layout/MainView/MainView.tsx @@ -1,4 +1,4 @@ -import React, { Suspense } from "react"; +import React from "react"; import styled from "styled-components"; import { LoadingPage } from "components";