🎉 Source greenhouse: added identification of accessible streams for API keys with limited permissions #6238

Merged (6 commits) on Sep 21, 2021
1 change: 1 addition & 0 deletions .github/workflows/publish-command.yml
@@ -112,6 +112,7 @@ jobs:
GOOGLE_SHEETS_TESTS_CREDS: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS }}
GOOGLE_WORKSPACE_ADMIN_REPORTS_TEST_CREDS: ${{ secrets.GOOGLE_WORKSPACE_ADMIN_REPORTS_TEST_CREDS }}
GREENHOUSE_TEST_CREDS: ${{ secrets.GREENHOUSE_TEST_CREDS }}
GREENHOUSE_TEST_CREDS_LIMITED: ${{ secrets.GREENHOUSE_TEST_CREDS_LIMITED }}
HARVEST_INTEGRATION_TESTS_CREDS: ${{ secrets.HARVEST_INTEGRATION_TESTS_CREDS }}
HUBSPOT_INTEGRATION_TESTS_CREDS: ${{ secrets.HUBSPOT_INTEGRATION_TESTS_CREDS }}
INSTAGRAM_INTEGRATION_TESTS_CREDS: ${{ secrets.INSTAGRAM_INTEGRATION_TESTS_CREDS }}
1 change: 1 addition & 0 deletions .github/workflows/test-command.yml
@@ -112,6 +112,7 @@ jobs:
GOOGLE_SHEETS_TESTS_CREDS: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS }}
GOOGLE_WORKSPACE_ADMIN_REPORTS_TEST_CREDS: ${{ secrets.GOOGLE_WORKSPACE_ADMIN_REPORTS_TEST_CREDS }}
GREENHOUSE_TEST_CREDS: ${{ secrets.GREENHOUSE_TEST_CREDS }}
GREENHOUSE_TEST_CREDS_LIMITED: ${{ secrets.GREENHOUSE_TEST_CREDS_LIMITED }}
HARVEST_INTEGRATION_TESTS_CREDS: ${{ secrets.HARVEST_INTEGRATION_TESTS_CREDS }}
HUBSPOT_INTEGRATION_TESTS_CREDS: ${{ secrets.HUBSPOT_INTEGRATION_TESTS_CREDS }}
INSTAGRAM_INTEGRATION_TESTS_CREDS: ${{ secrets.INSTAGRAM_INTEGRATION_TESTS_CREDS }}
@@ -2,7 +2,7 @@
"sourceDefinitionId": "59f1e50a-331f-4f09-b3e8-2e8d4d355f44",
"name": "Greenhouse",
"dockerRepository": "airbyte/source-greenhouse",
"dockerImageTag": "0.2.3",
"dockerImageTag": "0.2.4",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/greenhouse",
"icon": "greenhouse.svg"
}
@@ -182,7 +182,7 @@
- sourceDefinitionId: 59f1e50a-331f-4f09-b3e8-2e8d4d355f44
  name: Greenhouse
  dockerRepository: airbyte/source-greenhouse
  dockerImageTag: 0.2.3
  dockerImageTag: 0.2.4
  documentationUrl: https://docs.airbyte.io/integrations/sources/greenhouse
  icon: greenhouse.svg
- sourceDefinitionId: 40d24d0f-b8f9-4fe0-9e6c-b06c0f3f45e4
@@ -14,5 +14,5 @@ RUN pip install .

ENV AIRBYTE_ENTRYPOINT "/airbyte/base.sh"

LABEL io.airbyte.version=0.2.3
LABEL io.airbyte.version=0.2.4
LABEL io.airbyte.name=airbyte/source-greenhouse
@@ -0,0 +1,26 @@
# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference)
# for more information about how to configure these tests
connector_image: airbyte/source-greenhouse:dev
tests:
  spec:
    - spec_path: "source_greenhouse/spec.json"
  connection:
    - config_path: "secrets/config.json"
      status: "succeed"
    - config_path: "secrets/config_users_only.json"
      status: "succeed"
    - config_path: "integration_tests/config_invalid.json"
      status: "failed"
  discovery:
    - config_path: "secrets/config.json"
    - config_path: "secrets/config_users_only.json"
  basic_read:
    - config_path: "secrets/config.json"
      configured_catalog_path: "integration_tests/configured_catalog.json"
    - config_path: "secrets/config.json"
      configured_catalog_path: "integration_tests/configured_catalog_users_only.json"
  full_refresh:
    - config_path: "secrets/config.json"
      configured_catalog_path: "integration_tests/configured_catalog.json"
    - config_path: "secrets/config_users_only.json"
      configured_catalog_path: "integration_tests/configured_catalog_users_only.json"
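The new `secrets/config_users_only.json` credentials exercise an API key with limited permissions (apparently read access to the `users` endpoint only, given the reduced catalog further below), and the `connection` and `discovery` cases are expected to succeed with it rather than fail. As an illustration of what the `connection` cases check, here is a small sketch (not part of the PR) that runs the connector's `health_check()` for each config, assuming every config file contains an `api_key` field as `config_invalid.json` does:

```python
# Illustrative sketch, not part of the PR: mirror the "connection" cases above
# by running the connector's health_check() against each config file.
# Assumes every config holds an "api_key" field, as config_invalid.json does.
import json

from source_greenhouse.client import Client


def can_connect(config_path: str) -> bool:
    with open(config_path) as config_file:
        api_key = json.load(config_file)["api_key"]
    alive, error = Client(api_key=api_key).health_check()
    print(f"{config_path}: alive={alive}, error={error}")
    return alive


assert can_connect("secrets/config.json")                        # full-permission key
assert can_connect("secrets/config_users_only.json")             # limited key still connects
assert not can_connect("integration_tests/config_invalid.json")  # bogus key is rejected
```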
@@ -0,0 +1,16 @@
#!/usr/bin/env sh

# Build latest connector image
docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2)

# Pull latest acctest image
docker pull airbyte/source-acceptance-test:latest

# Run
docker run --rm -it \
    -v /var/run/docker.sock:/var/run/docker.sock \
    -v /tmp:/tmp \
    -v $(pwd):/test_input \
    airbyte/source-acceptance-test \
    --acceptance-test-config /test_input

@@ -24,25 +24,11 @@


import pytest
from grnhse.exceptions import EndpointNotFound, HTTPError
from source_greenhouse.client import Client

pytest_plugins = ("source_acceptance_test.plugin",)


def test__heal_check_with_wrong_api_key():
    client = Client(api_key="wrong_key")
    alive, error = client.health_check()

    assert not alive
    assert error == '401 {"message":"Invalid Basic Auth credentials"}'


def test__custom_fields_with_wrong_api_key():
    client = Client(api_key="wrong_key")
    with pytest.raises(HTTPError, match='401 {"message":"Invalid Basic Auth credentials"}'):
        list(client.list("custom_fields"))


def test_client_wrong_endpoint():
    client = Client(api_key="wrong_key")
    with pytest.raises(EndpointNotFound, match="unknown_endpoint"):
        next(client.list("unknown_endpoint"))


@pytest.fixture(scope="session", autouse=True)
def connector_setup():
    """ This fixture is a placeholder for external resources that acceptance test might require."""
    yield
@@ -0,0 +1,3 @@
{
"api_key": "bla"
}
Review comment (Contributor): nit: newline

@@ -0,0 +1,114 @@
{
  "streams": [
    {
      "stream": {
        "name": "applications",
        "json_schema": {},
        "supported_sync_modes": ["full_refresh"],
        "source_defined_cursor": false
      },
      "sync_mode": "full_refresh",
      "destination_sync_mode": "overwrite"
    },
    {
      "stream": {
        "name": "candidates",
        "json_schema": {},
        "supported_sync_modes": ["full_refresh"],
        "source_defined_cursor": false
      },
      "sync_mode": "full_refresh",
      "destination_sync_mode": "overwrite"
    },
    {
      "stream": {
        "name": "close_reasons",
        "json_schema": {},
        "supported_sync_modes": ["full_refresh"],
        "source_defined_cursor": false
      },
      "sync_mode": "full_refresh",
      "destination_sync_mode": "overwrite"
    },
    {
      "stream": {
        "name": "degrees",
        "json_schema": {},
        "supported_sync_modes": ["full_refresh"],
        "source_defined_cursor": false
      },
      "sync_mode": "full_refresh",
      "destination_sync_mode": "overwrite"
    },
    {
      "stream": {
        "name": "departments",
        "json_schema": {},
        "supported_sync_modes": ["full_refresh"],
        "source_defined_cursor": false
      },
      "sync_mode": "full_refresh",
      "destination_sync_mode": "overwrite"
    },
    {
      "stream": {
        "name": "job_posts",
        "json_schema": {},
        "supported_sync_modes": ["full_refresh"],
        "source_defined_cursor": false
      },
      "sync_mode": "full_refresh",
      "destination_sync_mode": "overwrite"
    },
    {
      "stream": {
        "name": "jobs",
        "json_schema": {},
        "supported_sync_modes": ["full_refresh"],
        "source_defined_cursor": false
      },
      "sync_mode": "full_refresh",
      "destination_sync_mode": "overwrite"
    },
    {
      "stream": {
        "name": "offers",
        "json_schema": {},
        "supported_sync_modes": ["full_refresh"],
        "source_defined_cursor": false
      },
      "sync_mode": "full_refresh",
      "destination_sync_mode": "overwrite"
    },
    {
      "stream": {
        "name": "scorecards",
        "json_schema": {},
        "supported_sync_modes": ["full_refresh"],
        "source_defined_cursor": false
      },
      "sync_mode": "full_refresh",
      "destination_sync_mode": "overwrite"
    },
    {
      "stream": {
        "name": "users",
        "json_schema": {},
        "supported_sync_modes": ["full_refresh"],
        "source_defined_cursor": false
      },
      "sync_mode": "full_refresh",
      "destination_sync_mode": "overwrite"
    },
    {
      "stream": {
        "name": "custom_fields",
        "json_schema": {},
        "supported_sync_modes": ["full_refresh"],
        "source_defined_cursor": false
      },
      "sync_mode": "full_refresh",
      "destination_sync_mode": "overwrite"
    }
  ]
}
@@ -0,0 +1,14 @@
{
  "streams": [
    {
      "stream": {
        "name": "users",
        "json_schema": {},
        "supported_sync_modes": ["full_refresh"],
        "source_defined_cursor": false
      },
      "sync_mode": "full_refresh",
      "destination_sync_mode": "overwrite"
    }
  ]
}
@@ -1,4 +1,5 @@
# This file is autogenerated -- only edit if you know what you are doing. Use setup.py for declaring dependencies.
-e ../../bases/airbyte-protocol
-e ../../bases/base-python
-e ../../bases/source-acceptance-test
-e .
9 changes: 8 additions & 1 deletion airbyte-integrations/connectors/source-greenhouse/setup.py
@@ -25,12 +25,19 @@

from setuptools import find_packages, setup

TEST_REQUIREMENTS = [
    "pytest~=6.1",
    "source-acceptance-test",
]
setup(
    name="source_greenhouse",
    description="Source implementation for Greenhouse.",
    author="Airbyte",
    author_email="[email protected]",
    packages=find_packages(),
    install_requires=["airbyte-protocol", "base-python", "six==1.15.0", "grnhse-api==0.1.1", "pytest==6.1.2"],
    install_requires=["airbyte-protocol", "base-python", "six==1.15.0", "grnhse-api==0.1.1"],
    package_data={"": ["*.json", "schemas/*.json"]},
    extras_require={
        "tests": TEST_REQUIREMENTS,
    },
)
@@ -24,9 +24,10 @@


from functools import partial
from typing import Mapping, Tuple
from typing import Generator, List, Mapping, Tuple

from base_python import BaseClient
from airbyte_protocol import AirbyteStream
from base_python import AirbyteLogger, BaseClient
from grnhse import Harvest
from grnhse.exceptions import HTTPError

@@ -67,16 +68,42 @@ def list(self, name, **kwargs):
    def _enumerate_methods(self) -> Mapping[str, callable]:
        return {entity: partial(self.list, name=entity) for entity in self.ENTITIES}

    def get_accessible_endpoints(self) -> List[str]:
        """Try to read each supported endpoint and return accessible stream names"""
        logger = AirbyteLogger()
        accessible_endpoints = []
        for entity in self.ENTITIES:
            try:
                getattr(self._client, entity).get()
                accessible_endpoints.append(entity)
            except HTTPError as error:
                logger.warn(f"Endpoint '{entity}' error: {str(error)}")
                if "This API Key does not have permission for this endpoint" not in str(error):
                    raise error
        logger.info(f"API key has access to {len(accessible_endpoints)} endpoints: {accessible_endpoints}")
        return accessible_endpoints

    def health_check(self) -> Tuple[bool, str]:
        alive = True
        error_msg = None

        try:
            # because there is no good candidate to try our connection
            # we use users endpoint as potentially smallest dataset
            self._client.users.get()
            accessible_endpoints = self.get_accessible_endpoints()
            if not accessible_endpoints:
                alive = False
                error_msg = (
                    "Your API Key does not have permission for any existing endpoints. Please grant read permissions for required streams/endpoints"
                )

        except HTTPError as error:
            alive = False
            error_msg = str(error)

        return alive, error_msg

    @property
    def streams(self) -> Generator[AirbyteStream, None, None]:
        """Process accessible streams only"""
        accessible_endpoints = self.get_accessible_endpoints()
        for stream in super().streams:
            if stream.name in accessible_endpoints:
                yield stream
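Taken together, `health_check` now passes whenever at least one endpoint is readable, and discovery only advertises the streams the key can actually access. Note that both `health_check` and the `streams` property call `get_accessible_endpoints()`, so each of them probes every entity with a separate request. A minimal usage sketch (illustrative only, assuming a valid key in a hypothetical `GREENHOUSE_API_KEY` environment variable):

```python
# Illustrative sketch, not part of the PR: discovery with a limited API key.
import os

from source_greenhouse.client import Client

client = Client(api_key=os.environ["GREENHOUSE_API_KEY"])  # hypothetical env var

alive, error = client.health_check()
print(alive, error)  # (True, None) as long as at least one endpoint is readable

print(client.get_accessible_endpoints())            # e.g. ["users"] for a users-only key
print([stream.name for stream in client.streams])   # yields only the accessible streams
```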
@@ -41,7 +41,7 @@
"type": ["null", "string"]
},
"prospect_owner": {
"type": "object",
"type": ["null", "object"],
"properties": {
"name": {
"type": "string"
Expand Down Expand Up @@ -69,7 +69,15 @@
"type": "integer"
},
"current_stage": {
"type": ["null", "string"]
"type": ["null", "object"],
"properties": {
"name": {
"type": "string"
},
"id": {
"type": "integer"
}
}
},
"credited_to": {
"type": "object",
Expand Down
@@ -109,7 +109,7 @@
"type": ["null", "string"]
},
"prospect_owner": {
"type": "object",
"type": ["null", "object"],
"properties": {
"name": {
"type": "string"
Expand Down Expand Up @@ -137,7 +137,15 @@
"type": "integer"
},
"current_stage": {
"type": ["null", "string"]
"type": ["null", "object"],
"properties": {
"name": {
"type": "string"
},
"id": {
"type": "integer"
}
}
},
"credited_to": {
"type": "object",
Expand Down