diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 268c4c2cba692..56541065c0f4f 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 0.30.25-alpha
+current_version = 0.30.34-alpha
commit = False
tag = False
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-[a-z]+)?
@@ -10,6 +10,12 @@ serialize =
[bumpversion:file:.env]
+[bumpversion:file:airbyte-server/Dockerfile]
+
+[bumpversion:file:airbyte-workers/Dockerfile]
+
+[bumpversion:file:airbyte-scheduler/app/Dockerfile]
+
[bumpversion:file:airbyte-webapp/package.json]
[bumpversion:file:airbyte-webapp/package-lock.json]
diff --git a/.env b/.env
index afa76ab975b76..eb0f60456990c 100644
--- a/.env
+++ b/.env
@@ -1,4 +1,4 @@
-VERSION=0.30.25-alpha
+VERSION=0.30.34-alpha
# Airbyte Internal Job Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db
DATABASE_USER=docker
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 87f7d9dc3564f..a2505b2a5949b 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -29,7 +29,6 @@ Expand the relevant checklist and delete the others.
- [ ] `docs/integrations/README.md`
- [ ] `airbyte-integrations/builds.md`
- [ ] PR name follows [PR naming conventions](https://docs.airbyte.io/contributing-to-airbyte/updating-documentation#issues-and-pull-requests)
-- [ ] Connector added to connector index like described [here](https://docs.airbyte.io/connector-development#publishing-a-connector)
#### Airbyter
@@ -40,6 +39,8 @@ If this is a community PR, the Airbyte engineer reviewing this PR is responsible
- [ ] Credentials added to Github CI. [Instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci).
- [ ] [`/test connector=connectors/<name>` command](https://docs.airbyte.io/connector-development#updating-an-existing-connector) is passing.
- [ ] New Connector version released on Dockerhub by running the `/publish` command described [here](https://docs.airbyte.io/connector-development#updating-an-existing-connector)
+- [ ] After the connector is published, connector added to connector index as described [here](https://docs.airbyte.io/connector-development#publishing-a-connector)
+- [ ] Seed specs have been re-generated by building the platform and committing the changes to the seed spec files, as described [here](https://docs.airbyte.io/connector-development#publishing-a-connector)
@@ -59,7 +60,6 @@ If this is a community PR, the Airbyte engineer reviewing this PR is responsible
- [ ] Connector's `bootstrap.md`. See [description and examples](https://docs.google.com/document/d/1ypdgmwmEHWv-TrO4_YOQ7pAJGVrMp5BOkEVh831N260/edit?usp=sharing)
- [ ] Changelog updated in `docs/integrations/<source or destination>/<name>.md` including changelog. See changelog [example](https://docs.airbyte.io/integrations/sources/stripe#changelog)
- [ ] PR name follows [PR naming conventions](https://docs.airbyte.io/contributing-to-airbyte/updating-documentation#issues-and-pull-requests)
-- [ ] Connector version bumped like described [here](https://docs.airbyte.io/connector-development#publishing-a-connector)
#### Airbyter
@@ -70,6 +70,8 @@ If this is a community PR, the Airbyte engineer reviewing this PR is responsible
- [ ] Credentials added to Github CI. [Instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci).
- [ ] [`/test connector=connectors/<name>` command](https://docs.airbyte.io/connector-development#updating-an-existing-connector) is passing.
- [ ] New Connector version released on Dockerhub by running the `/publish` command described [here](https://docs.airbyte.io/connector-development#updating-an-existing-connector)
+- [ ] After the new connector version is published, connector version bumped in the seed directory as described [here](https://docs.airbyte.io/connector-development#publishing-a-connector)
+- [ ] Seed specs have been re-generated by building the platform and committing the changes to the seed spec files, as described [here](https://docs.airbyte.io/connector-development#publishing-a-connector)
diff --git a/.github/workflows/doc-link-check.json b/.github/workflows/doc-link-check.json
index 6777f5033b01c..a7683d73d66bd 100644
--- a/.github/workflows/doc-link-check.json
+++ b/.github/workflows/doc-link-check.json
@@ -45,11 +45,15 @@
"reason": "Test only scaffold connector",
"pattern": "destinations/scaffold-"
},
+ {
+ "reason": "Returns a 403 for many valid pages",
+ "pattern": "https://mvnrepository.com/artifact/"
+ },
{
"reason": "Archived articles aren't actively maintained.",
"pattern": "archive/"
}
],
"retryOn429": false,
- "aliveStatusCodes": [200, 206, 401, 403, 429, 503]
+ "aliveStatusCodes": [200, 206, 429, 503, 0]
}
diff --git a/.github/workflows/doc-link-check.yml b/.github/workflows/doc-link-check.yml
index 9ccfc86d846bf..1a52e49cd9b07 100644
--- a/.github/workflows/doc-link-check.yml
+++ b/.github/workflows/doc-link-check.yml
@@ -1,5 +1,6 @@
# Perform link check on all markdown files
-name: Doc Link Checker (Full)
+
+name: Doc Link Checker
on:
push:
@@ -12,10 +13,17 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
+ # check all files on master
- uses: gaurav-nelson/github-action-markdown-link-check@v1
+ if: github.ref == 'refs/heads/master'
with:
- use-quiet-mode: 'no'
- use-verbose-mode: 'yes'
+ use-quiet-mode: 'yes'
check-modified-files-only: 'no'
config-file: .github/workflows/doc-link-check.json
- base-branch: ${{ github.base_ref }}
+# # check changed files for branches
+# - uses: gaurav-nelson/github-action-markdown-link-check@v1
+# if: github.ref != 'refs/heads/master'
+# with:
+# use-quiet-mode: 'yes'
+# check-modified-files-only: 'yes'
+# config-file: .github/workflows/doc-link-check.json
diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml
index f485acc781ace..87ff147195de7 100644
--- a/.github/workflows/gradle.yml
+++ b/.github/workflows/gradle.yml
@@ -459,6 +459,17 @@ jobs:
- name: Build Platform Docker Images
run: SUB_BUILD=PLATFORM ./gradlew composeBuild --scan
+ - name: Run Kubernetes End-to-End Acceptance Tests
+ env:
+ USER: root
+ HOME: /home/runner
+ AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }}
+ SECRET_STORE_GCP_CREDENTIALS: ${{ secrets.SECRET_STORE_GCP_CREDENTIALS }}
+ SECRET_STORE_GCP_PROJECT_ID: ${{ secrets.SECRET_STORE_GCP_PROJECT_ID }}
+ SECRET_STORE_FOR_CONFIGS: ${{ secrets.SECRET_STORE_FOR_CONFIGS }}
+ run: |
+ CI=true IS_MINIKUBE=true ./tools/bin/acceptance_test_kube.sh
+
- name: Run Logging Tests
run: ./tools/bin/cloud_storage_logging_test.sh
env:
@@ -481,16 +492,6 @@ jobs:
run: |
CI=true ./tools/bin/gcp_acceptance_tests.sh
- - name: Run Kubernetes End-to-End Acceptance Tests
- env:
- USER: root
- HOME: /home/runner
- AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }}
- SECRET_STORE_GCP_CREDENTIALS: ${{ secrets.SECRET_STORE_GCP_CREDENTIALS }}
- SECRET_STORE_GCP_PROJECT_ID: ${{ secrets.SECRET_STORE_GCP_PROJECT_ID }}
- SECRET_STORE_FOR_CONFIGS: ${{ secrets.SECRET_STORE_FOR_CONFIGS }}
- run: |
- CI=true IS_MINIKUBE=true ./tools/bin/acceptance_test_kube.sh
# In case of self-hosted EC2 errors, remove this block.
stop-kube-acceptance-test-runner:
name: Stop Kube Acceptance Test EC2 Runner
diff --git a/.github/workflows/publish-command.yml b/.github/workflows/publish-command.yml
index 79f3f90f128bf..22684e6e1cef9 100644
--- a/.github/workflows/publish-command.yml
+++ b/.github/workflows/publish-command.yml
@@ -107,6 +107,7 @@ jobs:
GOOGLE_ANALYTICS_V4_TEST_CREDS_OLD: ${{ secrets.GOOGLE_ANALYTICS_V4_TEST_CREDS_OLD }}
GOOGLE_CLOUD_STORAGE_TEST_CREDS: ${{ secrets.GOOGLE_CLOUD_STORAGE_TEST_CREDS }}
GOOGLE_DIRECTORY_TEST_CREDS: ${{ secrets.GOOGLE_DIRECTORY_TEST_CREDS }}
+ GOOGLE_DIRECTORY_TEST_CREDS_OAUTH: ${{ secrets.GOOGLE_DIRECTORY_TEST_CREDS_OAUTH }}
GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS: ${{ secrets.GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS }}
GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS_SRV_ACC: ${{ secrets.GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS_SRV_ACC }}
GOOGLE_SHEETS_TESTS_CREDS: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS }}
@@ -191,6 +192,7 @@ jobs:
MONGODB_TEST_CREDS: ${{ secrets.MONGODB_TEST_CREDS }}
SOURCE_ONESIGNAL_TEST_CREDS: ${{ secrets.SOURCE_ONESIGNAL_TEST_CREDS }}
SOURCE_SALESLOFT_TEST_CREDS: ${{ secrets.SOURCE_SALESLOFT_TEST_CREDS }}
+ SOURCE_CONFLUENCE_TEST_CREDS: ${{ secrets.SOURCE_CONFLUENCE_TEST_CREDS }}
SOURCE_AMAZON_SQS_TEST_CREDS: ${{ secrets.SOURCE_AMAZON_SQS_TEST_CREDS }}
SOURCE_FRESHSERVICE_TEST_CREDS: ${{ secrets.SOURCE_FRESHSERVICE_TEST_CREDS }}
SOURCE_LEMLIST_TEST_CREDS: ${{ secrets.SOURCE_LEMLIST_TEST_CREDS }}
@@ -198,6 +200,9 @@ jobs:
SOURCE_PAYSTACK_TEST_CREDS: ${{ secrets.SOURCE_PAYSTACK_TEST_CREDS }}
SOURCE_DELIGHTED_TEST_CREDS: ${{ secrets.SOURCE_DELIGHTED_TEST_CREDS }}
SOURCE_RETENTLY_TEST_CREDS: ${{ secrets.SOURCE_RETENTLY_TEST_CREDS }}
+ SOURCE_SENTRY_TEST_CREDS: ${{ secrets.SOURCE_SENTRY_TEST_CREDS }}
+ SOURCE_FRESHSALES_TEST_CREDS: ${{ secrets.SOURCE_FRESHSALES_TEST_CREDS }}
+ SOURCE_MONDAY_TEST_CREDS: ${{ secrets.SOURCE_MONDAY_TEST_CREDS }}
- run: |
echo "$SPEC_CACHE_SERVICE_ACCOUNT_KEY" > spec_cache_key_file.json && docker login -u airbytebot -p ${DOCKER_PASSWORD}
./tools/integrations/manage.sh publish airbyte-integrations/${{ github.event.inputs.connector }} ${{ github.event.inputs.run-tests }} --publish_spec_to_cache
diff --git a/.github/workflows/test-command.yml b/.github/workflows/test-command.yml
index 4e984b646c11c..a25bd99cce39b 100644
--- a/.github/workflows/test-command.yml
+++ b/.github/workflows/test-command.yml
@@ -102,6 +102,7 @@ jobs:
GOOGLE_ANALYTICS_V4_TEST_CREDS_OLD: ${{ secrets.GOOGLE_ANALYTICS_V4_TEST_CREDS_OLD }}
GOOGLE_CLOUD_STORAGE_TEST_CREDS: ${{ secrets.GOOGLE_CLOUD_STORAGE_TEST_CREDS }}
GOOGLE_DIRECTORY_TEST_CREDS: ${{ secrets.GOOGLE_DIRECTORY_TEST_CREDS }}
+ GOOGLE_DIRECTORY_TEST_CREDS_OAUTH: ${{ secrets.GOOGLE_DIRECTORY_TEST_CREDS_OAUTH }}
GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS: ${{ secrets.GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS }}
GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS_SRV_ACC: ${{ secrets.GOOGLE_SEARCH_CONSOLE_CDK_TEST_CREDS_SRV_ACC }}
GOOGLE_SHEETS_TESTS_CREDS: ${{ secrets.GOOGLE_SHEETS_TESTS_CREDS }}
@@ -186,6 +187,7 @@ jobs:
MONGODB_TEST_CREDS: ${{ secrets.MONGODB_TEST_CREDS }}
SOURCE_ONESIGNAL_TEST_CREDS: ${{ secrets.SOURCE_ONESIGNAL_TEST_CREDS }}
SOURCE_SALESLOFT_TEST_CREDS: ${{ secrets.SOURCE_SALESLOFT_TEST_CREDS }}
+ SOURCE_CONFLUENCE_TEST_CREDS: ${{ secrets.SOURCE_CONFLUENCE_TEST_CREDS }}
SOURCE_AMAZON_SQS_TEST_CREDS: ${{ secrets.SOURCE_AMAZON_SQS_TEST_CREDS }}
SOURCE_FRESHSERVICE_TEST_CREDS: ${{ secrets.SOURCE_FRESHSERVICE_TEST_CREDS }}
SOURCE_LEMLIST_TEST_CREDS: ${{ secrets.SOURCE_LEMLIST_TEST_CREDS }}
@@ -193,6 +195,9 @@ jobs:
SOURCE_PAYSTACK_TEST_CREDS: ${{ secrets.SOURCE_PAYSTACK_TEST_CREDS }}
SOURCE_DELIGHTED_TEST_CREDS: ${{ secrets.SOURCE_DELIGHTED_TEST_CREDS }}
SOURCE_RETENTLY_TEST_CREDS: ${{ secrets.SOURCE_RETENTLY_TEST_CREDS }}
+ SOURCE_SENTRY_TEST_CREDS: ${{ secrets.SOURCE_SENTRY_TEST_CREDS }}
+ SOURCE_FRESHSALES_TEST_CREDS: ${{ secrets.SOURCE_FRESHSALES_TEST_CREDS }}
+ SOURCE_MONDAY_TEST_CREDS: ${{ secrets.SOURCE_MONDAY_TEST_CREDS }}
- run: |
./tools/bin/ci_integration_test.sh ${{ github.event.inputs.connector }}
name: test ${{ github.event.inputs.connector }}
diff --git a/.vscode/settings.json b/.vscode/settings.json
deleted file mode 100644
index 4f81299a37cfc..0000000000000
--- a/.vscode/settings.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "java.configuration.updateBuildConfiguration": "automatic"
-}
diff --git a/README.md b/README.md
index aa1b60a45f528..719944ec993e8 100644
--- a/README.md
+++ b/README.md
@@ -34,8 +34,6 @@ Now visit [http://localhost:8000](http://localhost:8000)
Here is a [step-by-step guide](https://github.com/airbytehq/airbyte/tree/e378d40236b6a34e1c1cb481c8952735ec687d88/docs/quickstart/getting-started.md) showing you how to load data from an API into a file, all on your computer.
-If you want to schedule a 20-min call with our team to help you get set up, please select [some time directly here](https://calendly.com/nataliekwong/airbyte-onboarding).
-
## Features
* **Built for extensibility**: Adapt an existing connector to your needs or build a new one with ease.
diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/io/LineGobbler.java b/airbyte-commons/src/main/java/io/airbyte/commons/io/LineGobbler.java
index 6e87ce1f45172..0918ece248910 100644
--- a/airbyte-commons/src/main/java/io/airbyte/commons/io/LineGobbler.java
+++ b/airbyte-commons/src/main/java/io/airbyte/commons/io/LineGobbler.java
@@ -22,17 +22,17 @@ public class LineGobbler implements VoidCallable {
private final static Logger LOGGER = LoggerFactory.getLogger(LineGobbler.class);
public static void gobble(final InputStream is, final Consumer<String> consumer) {
- gobble(is, consumer, "generic", MdcScope.DEFAULT);
+ gobble(is, consumer, "generic", MdcScope.DEFAULT_BUILDER);
}
- public static void gobble(final InputStream is, final Consumer<String> consumer, final MdcScope mdcScope) {
- gobble(is, consumer, "generic", mdcScope);
+ public static void gobble(final InputStream is, final Consumer<String> consumer, final MdcScope.Builder mdcScopeBuilder) {
+ gobble(is, consumer, "generic", mdcScopeBuilder);
}
- public static void gobble(final InputStream is, final Consumer<String> consumer, final String caller, final MdcScope mdcScope) {
+ public static void gobble(final InputStream is, final Consumer<String> consumer, final String caller, final MdcScope.Builder mdcScopeBuilder) {
final ExecutorService executor = Executors.newSingleThreadExecutor();
final Map<String, String> mdc = MDC.getCopyOfContextMap();
- final var gobbler = new LineGobbler(is, consumer, executor, mdc, caller, mdcScope);
+ final var gobbler = new LineGobbler(is, consumer, executor, mdc, caller, mdcScopeBuilder);
executor.submit(gobbler);
}
@@ -41,21 +41,21 @@ public static void gobble(final InputStream is, final Consumer<String> consumer,
private final ExecutorService executor;
private final Map<String, String> mdc;
private final String caller;
- private final MdcScope containerLogMDC;
+ private final MdcScope.Builder containerLogMdcBuilder;
LineGobbler(final InputStream is,
final Consumer<String> consumer,
final ExecutorService executor,
final Map<String, String> mdc) {
- this(is, consumer, executor, mdc, "generic", MdcScope.DEFAULT);
+ this(is, consumer, executor, mdc, "generic", MdcScope.DEFAULT_BUILDER);
}
LineGobbler(final InputStream is,
final Consumer<String> consumer,
final ExecutorService executor,
final Map<String, String> mdc,
- final MdcScope mdcScope) {
- this(is, consumer, executor, mdc, "generic", mdcScope);
+ final MdcScope.Builder mdcScopeBuilder) {
+ this(is, consumer, executor, mdc, "generic", mdcScopeBuilder);
}
LineGobbler(final InputStream is,
@@ -63,13 +63,13 @@ public static void gobble(final InputStream is, final Consumer<String> consumer,
final ExecutorService executor,
final Map<String, String> mdc,
final String caller,
- final MdcScope mdcScope) {
+ final MdcScope.Builder mdcScopeBuilder) {
this.is = IOs.newBufferedReader(is);
this.consumer = consumer;
this.executor = executor;
this.mdc = mdc;
this.caller = caller;
- this.containerLogMDC = mdcScope;
+ this.containerLogMdcBuilder = mdcScopeBuilder;
}
@Override
@@ -78,7 +78,7 @@ public void voidCall() {
try {
String line;
while ((line = is.readLine()) != null) {
- try (containerLogMDC) {
+ try (final var mdcScope = containerLogMdcBuilder.build()) {
consumer.accept(line);
}
}
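The motivation for threading a Builder through LineGobbler: MdcScope is an AutoCloseable whose close() restores the MDC context map saved at construction (the originalContextMap field below), so one shared instance cannot safely be reopened every loop iteration. A minimal sketch of the resulting per-line pattern, assuming only the members visible in this diff (DEFAULT_BUILDER, build()); the class and method names are illustrative:

    import io.airbyte.commons.logging.MdcScope;
    import java.io.BufferedReader;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;
    import java.util.function.Consumer;

    class GobbleSketch {
      // Reads lines off a stream and runs the consumer inside a fresh MDC scope.
      static void gobble(final InputStream is, final Consumer<String> consumer) throws Exception {
        try (final BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) {
          String line;
          while ((line = reader.readLine()) != null) {
            // Build a scope per line: build() snapshots the current MDC and
            // close() restores it. Reusing one already-closed scope across
            // iterations would unbalance that save/restore pairing, which is
            // why the diff replaces the shared MdcScope with a Builder.
            try (final MdcScope scope = MdcScope.DEFAULT_BUILDER.build()) {
              consumer.accept(line);
            }
          }
        }
      }
    }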
diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/logging/MdcScope.java b/airbyte-commons/src/main/java/io/airbyte/commons/logging/MdcScope.java
index 190c1e9ebb198..b4c2f4c46f0e5 100644
--- a/airbyte-commons/src/main/java/io/airbyte/commons/logging/MdcScope.java
+++ b/airbyte-commons/src/main/java/io/airbyte/commons/logging/MdcScope.java
@@ -17,7 +17,7 @@
*
*
* try(final ScopedMDCChange scopedMDCChange = new ScopedMDCChange(
- * new HashMap() {{
+ * new HashMap<String, String>() {{
* put("my", "value");
* }}
* )) {
@@ -28,7 +28,7 @@
*/
public class MdcScope implements AutoCloseable {
- public final static MdcScope DEFAULT = new Builder().build();
+ public final static MdcScope.Builder DEFAULT_BUILDER = new Builder();
private final Map<String, String> originalContextMap;
diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/util/AutoCloseableIterators.java b/airbyte-commons/src/main/java/io/airbyte/commons/util/AutoCloseableIterators.java
index 6154cce7219f5..65a07187cd733 100644
--- a/airbyte-commons/src/main/java/io/airbyte/commons/util/AutoCloseableIterators.java
+++ b/airbyte-commons/src/main/java/io/airbyte/commons/util/AutoCloseableIterators.java
@@ -27,8 +27,8 @@ public static <T> AutoCloseableIterator<T> fromIterator(final Iterator<T> iterat
}
/**
- * Coerces a vanilla {@link Iterator} into a {@link AutoCloseableIterator}. The provided
- * {@param onClose} function will be called at most one time.
+ * Coerces a vanilla {@link Iterator} into a {@link AutoCloseableIterator}. The provided onClose
+ * function will be called at most one time.
*
* @param iterator autocloseable iterator to add another close to
* @param onClose the function that will be called on close
diff --git a/airbyte-commons/src/main/resources/log4j2.xml b/airbyte-commons/src/main/resources/log4j2.xml
index c07bd80c4b319..f682d8017e876 100644
--- a/airbyte-commons/src/main/resources/log4j2.xml
+++ b/airbyte-commons/src/main/resources/log4j2.xml
@@ -7,7 +7,7 @@
%replace{%X{log_source} - }{^ - }{}%d{yyyy-MM-dd HH:mm:ss}{GMT+0} %p (%X{job_root}) %C{1}(%M):%L - %replace{%m}{apikey=[\w\-]*}{apikey=*****}%n
- %d{yyyy-MM-dd HH:mm:ss} %-5p %replace{%m}{apikey=[\w\-]*}{apikey=*****}%n
+ %replace{%X{log_source} - }{^ - }{}%d{yyyy-MM-dd HH:mm:ss}{GMT+0} %p %C{1}(%M):%L - %replace{%m}{apikey=[\w\-]*}{apikey=*****}%n
$${env:LOG_LEVEL:-INFO}
diff --git a/airbyte-commons/src/test/java/io/airbyte/commons/util/AutoCloseableIteratorsTest.java b/airbyte-commons/src/test/java/io/airbyte/commons/util/AutoCloseableIteratorsTest.java
index 26f160c06a1f6..145e6565454e6 100644
--- a/airbyte-commons/src/test/java/io/airbyte/commons/util/AutoCloseableIteratorsTest.java
+++ b/airbyte-commons/src/test/java/io/airbyte/commons/util/AutoCloseableIteratorsTest.java
@@ -9,7 +9,6 @@
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@@ -18,6 +17,7 @@
import io.airbyte.commons.concurrency.VoidCallable;
import java.util.Iterator;
import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
@@ -38,7 +38,10 @@ void testFromIterator() throws Exception {
@Test
void testFromStream() throws Exception {
- final Stream<String> stream = spy(Stream.of("a", "b", "c"));
+ final AtomicBoolean isClosed = new AtomicBoolean(false);
+ final Stream<String> stream = Stream.of("a", "b", "c");
+ stream.onClose(() -> isClosed.set(true));
+
final AutoCloseableIterator<String> iterator = AutoCloseableIterators.fromStream(stream);
assertNext(iterator, "a");
@@ -46,7 +49,7 @@ void testFromStream() throws Exception {
assertNext(iterator, "c");
iterator.close();
- verify(stream).close();
+ assertTrue(isClosed.get());
}
private void assertNext(final Iterator<String> iterator, final String value) {
diff --git a/airbyte-config/init/Dockerfile b/airbyte-config/init/Dockerfile
index afba972dedf19..89bd5491e53a6 100644
--- a/airbyte-config/init/Dockerfile
+++ b/airbyte-config/init/Dockerfile
@@ -1,4 +1,5 @@
-FROM alpine:3.4 AS seed
+ARG ALPINE_IMAGE=alpine:3.4
+FROM ${ALPINE_IMAGE} AS seed
WORKDIR /app
diff --git a/airbyte-config/init/build.gradle b/airbyte-config/init/build.gradle
index 945051223ea9f..c7117fd16ea78 100644
--- a/airbyte-config/init/build.gradle
+++ b/airbyte-config/init/build.gradle
@@ -6,4 +6,8 @@ dependencies {
implementation 'commons-cli:commons-cli:1.4'
implementation project(':airbyte-config:models')
+ implementation project(':airbyte-config:persistence')
+ implementation project(':airbyte-protocol:models')
+ implementation project(':airbyte-commons-docker')
+ implementation project(':airbyte-json-validation')
}
diff --git a/airbyte-config/init/src/main/java/io/airbyte/config/init/SeedType.java b/airbyte-config/init/src/main/java/io/airbyte/config/init/SeedType.java
index 47c4c419bcf10..3730369621090 100644
--- a/airbyte-config/init/src/main/java/io/airbyte/config/init/SeedType.java
+++ b/airbyte-config/init/src/main/java/io/airbyte/config/init/SeedType.java
@@ -7,7 +7,9 @@
public enum SeedType {
STANDARD_SOURCE_DEFINITION("/seed/source_definitions.yaml", "sourceDefinitionId"),
- STANDARD_DESTINATION_DEFINITION("/seed/destination_definitions.yaml", "destinationDefinitionId");
+ STANDARD_DESTINATION_DEFINITION("/seed/destination_definitions.yaml", "destinationDefinitionId"),
+ SOURCE_SPEC("/seed/source_specs.yaml", "dockerImage"),
+ DESTINATION_SPEC("/seed/destination_specs.yaml", "dockerImage");
final String resourcePath;
// ID field name
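The two new *_SPEC entries key their seed files by dockerImage instead of a definition ID. A hedged sketch of how a seed YAML resource could be loaded into a map keyed by such a field; the loader below is illustrative, not the project's actual getConfigs implementation, and it assumes jackson-dataformat-yaml is on the classpath:

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
    import java.io.InputStream;
    import java.util.LinkedHashMap;
    import java.util.Map;

    class SeedLoaderSketch {
      // Hypothetical loader: reads a seed YAML list from the classpath and keys
      // each entry by the seed type's ID field ("dockerImage" for the new *_SPEC
      // entries, "sourceDefinitionId"/"destinationDefinitionId" for definitions).
      static Map<String, JsonNode> load(final String resourcePath, final String idField) throws Exception {
        final ObjectMapper yaml = new ObjectMapper(new YAMLFactory());
        try (final InputStream in = SeedLoaderSketch.class.getResourceAsStream(resourcePath)) {
          final JsonNode entries = yaml.readTree(in);
          final Map<String, JsonNode> byId = new LinkedHashMap<>();
          entries.forEach(entry -> byId.put(entry.get(idField).asText(), entry));
          return byId;
        }
      }
    }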
diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/YamlSeedConfigPersistence.java b/airbyte-config/init/src/main/java/io/airbyte/config/init/YamlSeedConfigPersistence.java
similarity index 63%
rename from airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/YamlSeedConfigPersistence.java
rename to airbyte-config/init/src/main/java/io/airbyte/config/init/YamlSeedConfigPersistence.java
index 3bca71d57c2db..d218cfca89d4a 100644
--- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/YamlSeedConfigPersistence.java
+++ b/airbyte-config/init/src/main/java/io/airbyte/config/init/YamlSeedConfigPersistence.java
@@ -2,23 +2,27 @@
* Copyright (c) 2021 Airbyte, Inc., all rights reserved.
*/
-package io.airbyte.config.persistence;
+package io.airbyte.config.init;
import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Resources;
+import io.airbyte.commons.docker.DockerUtils;
import io.airbyte.commons.json.Jsons;
import io.airbyte.commons.util.MoreIterators;
import io.airbyte.commons.yaml.Yamls;
import io.airbyte.config.AirbyteConfig;
import io.airbyte.config.ConfigSchema;
-import io.airbyte.config.init.SeedType;
+import io.airbyte.config.persistence.ConfigNotFoundException;
+import io.airbyte.config.persistence.ConfigPersistence;
import io.airbyte.validation.json.JsonValidationException;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -45,11 +49,40 @@ public static YamlSeedConfigPersistence get(final Class> seedDefinitionsResour
return new YamlSeedConfigPersistence(seedDefinitionsResourceClass);
}
- private YamlSeedConfigPersistence(final Class<?> seedDefinitionsResourceClass) throws IOException {
+ private YamlSeedConfigPersistence(final Class<?> seedResourceClass) throws IOException {
+ final Map<String, JsonNode> sourceDefinitionConfigs = getConfigs(seedResourceClass, SeedType.STANDARD_SOURCE_DEFINITION);
+ final Map<String, JsonNode> sourceSpecConfigs = getConfigs(seedResourceClass, SeedType.SOURCE_SPEC);
+ final Map<String, JsonNode> fullSourceDefinitionConfigs = sourceDefinitionConfigs.entrySet().stream()
+ .collect(Collectors.toMap(Entry::getKey, e -> mergeSpecIntoDefinition(e.getValue(), sourceSpecConfigs)));
+
+ final Map<String, JsonNode> destinationDefinitionConfigs = getConfigs(seedResourceClass, SeedType.STANDARD_DESTINATION_DEFINITION);
+ final Map<String, JsonNode> destinationSpecConfigs = getConfigs(seedResourceClass, SeedType.DESTINATION_SPEC);
+ final Map<String, JsonNode> fullDestinationDefinitionConfigs = destinationDefinitionConfigs.entrySet().stream()
+ .collect(Collectors.toMap(Entry::getKey, e -> mergeSpecIntoDefinition(e.getValue(), destinationSpecConfigs)));
+
this.allSeedConfigs = ImmutableMap.<SeedType, Map<String, JsonNode>>builder()
- .put(SeedType.STANDARD_SOURCE_DEFINITION, getConfigs(seedDefinitionsResourceClass, SeedType.STANDARD_SOURCE_DEFINITION))
- .put(SeedType.STANDARD_DESTINATION_DEFINITION, getConfigs(seedDefinitionsResourceClass, SeedType.STANDARD_DESTINATION_DEFINITION))
- .build();
+ .put(SeedType.STANDARD_SOURCE_DEFINITION, fullSourceDefinitionConfigs)
+ .put(SeedType.STANDARD_DESTINATION_DEFINITION, fullDestinationDefinitionConfigs).build();
+ }
+
+ /**
+ * Merges the corresponding spec JSON into the definition JSON. This is necessary because specs are
+ * stored in a separate resource file from definitions.
+ *
+ * @param definitionJson JSON of connector definition that is missing a spec
+ * @param specConfigs map of docker image to JSON of docker image/connector spec pair
+ * @return JSON of connector definition including the connector spec
+ */
+ private JsonNode mergeSpecIntoDefinition(final JsonNode definitionJson, final Map<String, JsonNode> specConfigs) {
+ final String dockerImage = DockerUtils.getTaggedImageName(
+ definitionJson.get("dockerRepository").asText(),
+ definitionJson.get("dockerImageTag").asText());
+ final JsonNode specConfigJson = specConfigs.get(dockerImage);
+ if (specConfigJson == null || specConfigJson.get("spec") == null) {
+ throw new UnsupportedOperationException(String.format("There is no seed spec for docker image %s", dockerImage));
+ }
+ ((ObjectNode) definitionJson).set("spec", specConfigJson.get("spec"));
+ return definitionJson;
}
@SuppressWarnings("UnstableApiUsage")
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/079d5540-f236-4294-ba7c-ade8fd918496.json b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/079d5540-f236-4294-ba7c-ade8fd918496.json
index ea1fd8ef6ef9b..2a948beb7b301 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/079d5540-f236-4294-ba7c-ade8fd918496.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/079d5540-f236-4294-ba7c-ade8fd918496.json
@@ -2,6 +2,6 @@
"destinationDefinitionId": "079d5540-f236-4294-ba7c-ade8fd918496",
"name": "BigQuery (denormalized typed struct)",
"dockerRepository": "airbyte/destination-bigquery-denormalized",
- "dockerImageTag": "0.1.6",
+ "dockerImageTag": "0.1.8",
"documentationUrl": "https://docs.airbyte.io/integrations/destinations/bigquery"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/2340cbba-358e-11ec-8d3d-0242ac130203.json b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/2340cbba-358e-11ec-8d3d-0242ac130203.json
new file mode 100644
index 0000000000000..d82499161c7b8
--- /dev/null
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/2340cbba-358e-11ec-8d3d-0242ac130203.json
@@ -0,0 +1,7 @@
+{
+ "destinationDefinitionId": "2340cbba-358e-11ec-8d3d-0242ac130203",
+ "name": "Pulsar",
+ "dockerRepository": "airbyte/destination-pulsar",
+ "dockerImageTag": "0.1.0",
+ "documentationUrl": "https://docs.airbyte.io/integrations/destinations/pulsar"
+}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/4816b78f-1489-44c1-9060-4b19d5fa9362.json b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/4816b78f-1489-44c1-9060-4b19d5fa9362.json
index 42a5b5150bf8d..9feab0c077547 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/4816b78f-1489-44c1-9060-4b19d5fa9362.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/4816b78f-1489-44c1-9060-4b19d5fa9362.json
@@ -2,6 +2,6 @@
"destinationDefinitionId": "4816b78f-1489-44c1-9060-4b19d5fa9362",
"name": "S3",
"dockerRepository": "airbyte/destination-s3",
- "dockerImageTag": "0.1.12",
+ "dockerImageTag": "0.1.13",
"documentationUrl": "https://docs.airbyte.io/integrations/destinations/s3"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/707456df-6f4f-4ced-b5c6-03f73bcad1c5.json b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/707456df-6f4f-4ced-b5c6-03f73bcad1c5.json
new file mode 100644
index 0000000000000..52e0f38dddfb7
--- /dev/null
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/707456df-6f4f-4ced-b5c6-03f73bcad1c5.json
@@ -0,0 +1,7 @@
+{
+ "destinationDefinitionId": "707456df-6f4f-4ced-b5c6-03f73bcad1c5",
+ "name": "Cassandra",
+ "dockerRepository": "airbyte/destination-cassandra",
+ "dockerImageTag": "0.1.0",
+ "documentationUrl": "https://docs.airbyte.io/integrations/destinations/cassandra"
+}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/ca8f6566-e555-4b40-943a-545bf123117a.json b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/ca8f6566-e555-4b40-943a-545bf123117a.json
index 89078e4019d88..b7e6e4fff2655 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/ca8f6566-e555-4b40-943a-545bf123117a.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_DESTINATION_DEFINITION/ca8f6566-e555-4b40-943a-545bf123117a.json
@@ -2,6 +2,6 @@
"destinationDefinitionId": "ca8f6566-e555-4b40-943a-545bf123117a",
"name": "Google Cloud Storage (GCS)",
"dockerRepository": "airbyte/destination-gcs",
- "dockerImageTag": "0.1.2",
+ "dockerImageTag": "0.1.3",
"documentationUrl": "https://docs.airbyte.io/integrations/destinations/gcs"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/010eb12f-837b-4685-892d-0a39f76a98f5.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/010eb12f-837b-4685-892d-0a39f76a98f5.json
index 701207e5790ff..58ceb7512f0c9 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/010eb12f-837b-4685-892d-0a39f76a98f5.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/010eb12f-837b-4685-892d-0a39f76a98f5.json
@@ -2,7 +2,7 @@
"sourceDefinitionId": "010eb12f-837b-4685-892d-0a39f76a98f5",
"name": "Facebook Pages",
"dockerRepository": "airbyte/source-facebook-pages",
- "dockerImageTag": "0.1.2",
+ "dockerImageTag": "0.1.3",
"documentationUrl": "https://hub.docker.com/r/airbyte/source-facebook-pages",
"icon": "facebook.svg"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/12928b32-bf0a-4f1e-964f-07e12e37153a.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/12928b32-bf0a-4f1e-964f-07e12e37153a.json
index cd73c479261a5..69ac7cf32c70a 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/12928b32-bf0a-4f1e-964f-07e12e37153a.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/12928b32-bf0a-4f1e-964f-07e12e37153a.json
@@ -2,7 +2,7 @@
"sourceDefinitionId": "12928b32-bf0a-4f1e-964f-07e12e37153a",
"name": "Mixpanel",
"dockerRepository": "airbyte/source-mixpanel",
- "dockerImageTag": "0.1.1",
+ "dockerImageTag": "0.1.3",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/mixpanel",
"icon": "mixpanel.svg"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/1d4fdb25-64fc-4569-92da-fcdca79a8372.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/1d4fdb25-64fc-4569-92da-fcdca79a8372.json
index 46fde36888a3b..34f0a63393b41 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/1d4fdb25-64fc-4569-92da-fcdca79a8372.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/1d4fdb25-64fc-4569-92da-fcdca79a8372.json
@@ -2,6 +2,6 @@
"sourceDefinitionId": "1d4fdb25-64fc-4569-92da-fcdca79a8372",
"name": "Okta",
"dockerRepository": "airbyte/source-okta",
- "dockerImageTag": "0.1.2",
+ "dockerImageTag": "0.1.4",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/okta"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/2e875208-0c0b-4ee4-9e92-1cb3156ea799.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/2e875208-0c0b-4ee4-9e92-1cb3156ea799.json
index 00fe3f7d0fba4..6cb4f103e6d2f 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/2e875208-0c0b-4ee4-9e92-1cb3156ea799.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/2e875208-0c0b-4ee4-9e92-1cb3156ea799.json
@@ -2,6 +2,6 @@
"sourceDefinitionId": "2e875208-0c0b-4ee4-9e92-1cb3156ea799",
"name": "Iterable",
"dockerRepository": "airbyte/source-iterable",
- "dockerImageTag": "0.1.9",
+ "dockerImageTag": "0.1.10",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/iterable"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/36c891d9-4bd9-43ac-bad2-10e12756272c.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/36c891d9-4bd9-43ac-bad2-10e12756272c.json
index 67f13cd7fff56..3af6a81ec4d29 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/36c891d9-4bd9-43ac-bad2-10e12756272c.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/36c891d9-4bd9-43ac-bad2-10e12756272c.json
@@ -2,7 +2,7 @@
"sourceDefinitionId": "36c891d9-4bd9-43ac-bad2-10e12756272c",
"name": "Hubspot",
"dockerRepository": "airbyte/source-hubspot",
- "dockerImageTag": "0.1.21",
+ "dockerImageTag": "0.1.23",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/hubspot",
"icon": "hubspot.svg"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/45d2e135-2ede-49e1-939f-3e3ec357a65e.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/45d2e135-2ede-49e1-939f-3e3ec357a65e.json
index 92988179fb2e4..eb89c73307bf8 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/45d2e135-2ede-49e1-939f-3e3ec357a65e.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/45d2e135-2ede-49e1-939f-3e3ec357a65e.json
@@ -2,6 +2,6 @@
"sourceDefinitionId": "45d2e135-2ede-49e1-939f-3e3ec357a65e",
"name": "Recharge",
"dockerRepository": "airbyte/source-recharge",
- "dockerImageTag": "0.1.3",
+ "dockerImageTag": "0.1.4",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/recharge"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/59f1e50a-331f-4f09-b3e8-2e8d4d355f44.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/59f1e50a-331f-4f09-b3e8-2e8d4d355f44.json
index d5d140972c959..7999f8cfe9eed 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/59f1e50a-331f-4f09-b3e8-2e8d4d355f44.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/59f1e50a-331f-4f09-b3e8-2e8d4d355f44.json
@@ -2,7 +2,7 @@
"sourceDefinitionId": "59f1e50a-331f-4f09-b3e8-2e8d4d355f44",
"name": "Greenhouse",
"dockerRepository": "airbyte/source-greenhouse",
- "dockerImageTag": "0.2.5",
+ "dockerImageTag": "0.2.6",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/greenhouse",
"icon": "greenhouse.svg"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json
index 6a3aecdaa2153..04bc61ae14ef1 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/79c1aa37-dae3-42ae-b333-d1c105477715.json
@@ -2,7 +2,7 @@
"sourceDefinitionId": "79c1aa37-dae3-42ae-b333-d1c105477715",
"name": "Zendesk Support",
"dockerRepository": "airbyte/source-zendesk-support",
- "dockerImageTag": "0.1.3",
+ "dockerImageTag": "0.1.4",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/zendesk-support",
"icon": "zendesk.svg"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/80a54ea2-9959-4040-aac1-eee42423ec9b.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/80a54ea2-9959-4040-aac1-eee42423ec9b.json
new file mode 100644
index 0000000000000..6f40313e0fb65
--- /dev/null
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/80a54ea2-9959-4040-aac1-eee42423ec9b.json
@@ -0,0 +1,7 @@
+{
+ "sourceDefinitionId": "80a54ea2-9959-4040-aac1-eee42423ec9b",
+ "name": "Monday",
+ "dockerRepository": "airbyte/source-zendesk-monday",
+ "dockerImageTag": "0.1.0",
+ "documentationUrl": "https://docs.airbyte.io/integrations/sources/monday"
+}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/9da77001-af33-4bcd-be46-6252bf9342b9.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/9da77001-af33-4bcd-be46-6252bf9342b9.json
index 90ce4d4c9d5e9..aeddeffbaf6e4 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/9da77001-af33-4bcd-be46-6252bf9342b9.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/9da77001-af33-4bcd-be46-6252bf9342b9.json
@@ -2,6 +2,6 @@
"sourceDefinitionId": "9da77001-af33-4bcd-be46-6252bf9342b9",
"name": "Shopify",
"dockerRepository": "airbyte/source-shopify",
- "dockerImageTag": "0.1.21",
+ "dockerImageTag": "0.1.22",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/shopify"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/b117307c-14b6-41aa-9422-947e34922962.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/b117307c-14b6-41aa-9422-947e34922962.json
index 9cf14666e7b14..b0f7d72a5c777 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/b117307c-14b6-41aa-9422-947e34922962.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/b117307c-14b6-41aa-9422-947e34922962.json
@@ -2,7 +2,7 @@
"sourceDefinitionId": "b117307c-14b6-41aa-9422-947e34922962",
"name": "Salesforce",
"dockerRepository": "airbyte/source-salesforce",
- "dockerImageTag": "0.1.2",
+ "dockerImageTag": "0.1.3",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/salesforce",
"icon": "salesforce.svg"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/b5ea17b1-f170-46dc-bc31-cc744ca984c1.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/b5ea17b1-f170-46dc-bc31-cc744ca984c1.json
index e65a9e63751d9..ff762f8c9f705 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/b5ea17b1-f170-46dc-bc31-cc744ca984c1.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/b5ea17b1-f170-46dc-bc31-cc744ca984c1.json
@@ -2,7 +2,7 @@
"sourceDefinitionId": "b5ea17b1-f170-46dc-bc31-cc744ca984c1",
"name": "Microsoft SQL Server (MSSQL)",
"dockerRepository": "airbyte/source-mssql",
- "dockerImageTag": "0.3.6",
+ "dockerImageTag": "0.3.8",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/mssql",
"icon": "mssql.svg"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/cdaf146a-9b75-49fd-9dd2-9d64a0bb4781.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/cdaf146a-9b75-49fd-9dd2-9d64a0bb4781.json
new file mode 100644
index 0000000000000..f88e4eedda821
--- /dev/null
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/cdaf146a-9b75-49fd-9dd2-9d64a0bb4781.json
@@ -0,0 +1,7 @@
+{
+ "sourceDefinitionId": "cdaf146a-9b75-49fd-9dd2-9d64a0bb4781",
+ "name": "Sentry",
+ "dockerRepository": "airbyte/source-sentry",
+ "dockerImageTag": "0.1.0",
+ "documentationUrl": "https://docs.airbyte.io/integrations/sources/sentry"
+}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/d19ae824-e289-4b14-995a-0632eb46d246.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/d19ae824-e289-4b14-995a-0632eb46d246.json
index 7e1649ea63744..5abea4626c530 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/d19ae824-e289-4b14-995a-0632eb46d246.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/d19ae824-e289-4b14-995a-0632eb46d246.json
@@ -2,6 +2,6 @@
"sourceDefinitionId": "d19ae824-e289-4b14-995a-0632eb46d246",
"name": "Google Directory",
"dockerRepository": "airbyte/source-google-directory",
- "dockerImageTag": "0.1.5",
+ "dockerImageTag": "0.1.8",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/google-directory"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/decd338e-5647-4c0b-adf4-da0e75f5a750.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/decd338e-5647-4c0b-adf4-da0e75f5a750.json
index 1a952019a996f..2fd2cdf5d5d98 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/decd338e-5647-4c0b-adf4-da0e75f5a750.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/decd338e-5647-4c0b-adf4-da0e75f5a750.json
@@ -2,7 +2,7 @@
"sourceDefinitionId": "decd338e-5647-4c0b-adf4-da0e75f5a750",
"name": "Postgres",
"dockerRepository": "airbyte/source-postgres",
- "dockerImageTag": "0.3.9",
+ "dockerImageTag": "0.3.13",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/postgres",
"icon": "postgresql.svg"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e094cb9a-26de-4645-8761-65c0c425d1de.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e094cb9a-26de-4645-8761-65c0c425d1de.json
index 712f4c4558c82..327a7d21a5e48 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e094cb9a-26de-4645-8761-65c0c425d1de.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e094cb9a-26de-4645-8761-65c0c425d1de.json
@@ -2,7 +2,7 @@
"sourceDefinitionId": "e094cb9a-26de-4645-8761-65c0c425d1de",
"name": "Stripe",
"dockerRepository": "airbyte/source-stripe",
- "dockerImageTag": "0.1.21",
+ "dockerImageTag": "0.1.22",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/stripe",
"icon": "stripe.svg"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json
index da5565d41b5f2..798cb04b32b5a 100644
--- a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/e7778cfc-e97c-4458-9ecb-b4f2bba8946c.json
@@ -2,7 +2,7 @@
"sourceDefinitionId": "e7778cfc-e97c-4458-9ecb-b4f2bba8946c",
"name": "Facebook Marketing",
"dockerRepository": "airbyte/source-facebook-marketing",
- "dockerImageTag": "0.2.21",
+ "dockerImageTag": "0.2.22",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/facebook-marketing",
"icon": "facebook.svg"
}
diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/eca08d79-7b92-4065-b7f3-79c14836ebe7.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/eca08d79-7b92-4065-b7f3-79c14836ebe7.json
new file mode 100644
index 0000000000000..d74256e9d2816
--- /dev/null
+++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/eca08d79-7b92-4065-b7f3-79c14836ebe7.json
@@ -0,0 +1,7 @@
+{
+ "sourceDefinitionId": "eca08d79-7b92-4065-b7f3-79c14836ebe7",
+ "name": "Freshsales",
+ "dockerRepository": "airbyte/source-freshsales",
+ "dockerImageTag": "0.1.0",
+ "documentationUrl": "https://docs.airbyte.io/integrations/sources/freshsales"
+}
diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
index 1d899ea73b42f..8e9644a95b98b 100644
--- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
@@ -11,8 +11,13 @@
- name: BigQuery (denormalized typed struct)
destinationDefinitionId: 079d5540-f236-4294-ba7c-ade8fd918496
dockerRepository: airbyte/destination-bigquery-denormalized
- dockerImageTag: 0.1.7
+ dockerImageTag: 0.1.8
documentationUrl: https://docs.airbyte.io/integrations/destinations/bigquery
+- name: Cassandra
+ destinationDefinitionId: 707456df-6f4f-4ced-b5c6-03f73bcad1c5
+ dockerRepository: airbyte/destination-cassandra
+ dockerImageTag: 0.1.0
+ documentationUrl: https://docs.airbyte.io/integrations/destinations/cassandra
- name: Chargify (Keen)
destinationDefinitionId: 81740ce8-d764-4ea7-94df-16bb41de36ae
dockerRepository: airbyte/destination-keen
@@ -31,7 +36,7 @@
- name: Google Cloud Storage (GCS)
destinationDefinitionId: ca8f6566-e555-4b40-943a-545bf123117a
dockerRepository: airbyte/destination-gcs
- dockerImageTag: 0.1.2
+ dockerImageTag: 0.1.3
documentationUrl: https://docs.airbyte.io/integrations/destinations/gcs
- name: Google PubSub
destinationDefinitionId: 356668e2-7e34-47f3-a3b0-67a8a481b692
@@ -84,6 +89,11 @@
dockerImageTag: 0.3.11
documentationUrl: https://docs.airbyte.io/integrations/destinations/postgres
icon: postgresql.svg
+- name: Pulsar
+ destinationDefinitionId: 2340cbba-358e-11ec-8d3d-0242ac130203
+ dockerRepository: airbyte/destination-pulsar
+ dockerImageTag: 0.1.0
+ documentationUrl: https://docs.airbyte.io/integrations/destinations/pulsar
- name: Redshift
destinationDefinitionId: f7a7d195-377f-cf5b-70a5-be6b819019dc
dockerRepository: airbyte/destination-redshift
@@ -93,7 +103,7 @@
- name: S3
destinationDefinitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362
dockerRepository: airbyte/destination-s3
- dockerImageTag: 0.1.12
+ dockerImageTag: 0.1.13
documentationUrl: https://docs.airbyte.io/integrations/destinations/s3
- name: Snowflake
destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba
diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml
new file mode 100644
index 0000000000000..6142ea259b445
--- /dev/null
+++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml
@@ -0,0 +1,3051 @@
+# This file is generated by io.airbyte.config.specs.SeedConnectorSpecGenerator.
+# Do NOT edit this file directly. See generator class for more details.
+---
+- dockerImage: "airbyte/destination-azure-blob-storage:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/azureblobstorage"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "AzureBlobStorage Destination Spec"
+ type: "object"
+ required:
+ - "azure_blob_storage_account_name"
+ - "azure_blob_storage_account_key"
+ - "format"
+ additionalProperties: false
+ properties:
+ azure_blob_storage_endpoint_domain_name:
+ title: "Endpoint Domain Name"
+ type: "string"
+ default: "blob.core.windows.net"
+ description: "This is Azure Blob Storage endpoint domain name. Leave default\
+ \ value (or leave it empty if run container from command line) to use\
+ \ Microsoft native from example."
+ examples:
+ - "blob.core.windows.net"
+ azure_blob_storage_container_name:
+ title: "Azure blob storage container (Bucket) Name"
+ type: "string"
+ description: "The name of the Azure blob storage container. If not exists\
+ \ - will be created automatically. May be empty, then will be created\
+ \ automatically airbytecontainer+timestamp"
+ examples:
+ - "airbytetescontainername"
+ azure_blob_storage_account_name:
+ title: "Azure Blob Storage account name"
+ type: "string"
+ description: "The account's name of the Azure Blob Storage."
+ examples:
+ - "airbyte5storage"
+ azure_blob_storage_account_key:
+ description: "The Azure blob storage account key."
+ airbyte_secret: true
+ type: "string"
+ examples:
+ - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd=="
+ format:
+ title: "Output Format"
+ type: "object"
+ description: "Output data format"
+ oneOf:
+ - title: "CSV: Comma-Separated Values"
+ required:
+ - "format_type"
+ - "flattening"
+ properties:
+ format_type:
+ type: "string"
+ const: "CSV"
+ flattening:
+ type: "string"
+ title: "Normalization (Flattening)"
+ description: "Whether the input json data should be normalized (flattened)\
+ \ in the output CSV. Please refer to docs for details."
+ default: "No flattening"
+ enum:
+ - "No flattening"
+ - "Root level flattening"
+ - title: "JSON Lines: newline-delimited JSON"
+ required:
+ - "format_type"
+ properties:
+ format_type:
+ type: "string"
+ const: "JSONL"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-bigquery:0.5.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "BigQuery Destination Spec"
+ type: "object"
+ required:
+ - "project_id"
+ - "dataset_id"
+ additionalProperties: true
+ properties:
+ big_query_client_buffer_size_mb:
+ title: "Google BigQuery client chunk size"
+ description: "Google BigQuery client's chunk(buffer) size (MIN=1, MAX =\
+ \ 15) for each table. The default 15MiB value is used if not set explicitly.\
+ \ It's recommended to decrease value for big data sets migration for less\
+ \ HEAP memory consumption and avoiding crashes. For more details refer\
+ \ to https://googleapis.dev/python/bigquery/latest/generated/google.cloud.bigquery.client.Client.html"
+ type: "integer"
+ minimum: 1
+ maximum: 15
+ default: 15
+ examples:
+ - "15"
+ project_id:
+ type: "string"
+ description: "The GCP project ID for the project containing the target BigQuery\
+ \ dataset."
+ title: "Project ID"
+ dataset_id:
+ type: "string"
+ description: "Default BigQuery Dataset ID tables are replicated to if the\
+ \ source does not specify a namespace."
+ title: "Default Dataset ID"
+ dataset_location:
+ type: "string"
+ description: "The location of the dataset. Warning: Changes made after creation\
+ \ will not be applied."
+ title: "Dataset Location"
+ default: "US"
+ enum:
+ - "US"
+ - "EU"
+ - "asia-east1"
+ - "asia-east2"
+ - "asia-northeast1"
+ - "asia-northeast2"
+ - "asia-northeast3"
+ - "asia-south1"
+ - "asia-southeast1"
+ - "asia-southeast2"
+ - "australia-southeast1"
+ - "europe-central1"
+ - "europe-central2"
+ - "europe-north1"
+ - "europe-west1"
+ - "europe-west2"
+ - "europe-west3"
+ - "europe-west4"
+ - "europe-west5"
+ - "europe-west6"
+ - "northamerica-northeast1"
+ - "southamerica-east1"
+ - "us-central1"
+ - "us-east1"
+ - "us-east4"
+ - "us-west-1"
+ - "us-west-2"
+ - "us-west-3"
+ - "us-west-4"
+ credentials_json:
+ type: "string"
+ description: "The contents of the JSON service account key. Check out the\
+ \ docs if you need help generating this key. Default credentials will\
+ \ be used if this field is left empty."
+ title: "Credentials JSON"
+ airbyte_secret: true
+ transformation_priority:
+ type: "string"
+ description: "When running custom transformations or Basic normalization,\
+ \ running queries on interactive mode can hit BQ limits; choosing batch\
+ \ will solve those limits."
+ title: "Transformation Query Run Type"
+ default: "interactive"
+ enum:
+ - "interactive"
+ - "batch"
+ loading_method:
+ type: "object"
+ title: "Loading Method"
+ description: "Loading method used to send select the way data will be uploaded\
+ \ to BigQuery."
+ oneOf:
+ - title: "Standard Inserts"
+ additionalProperties: false
+ description: "Direct uploading using streams."
+ required:
+ - "method"
+ properties:
+ method:
+ type: "string"
+ const: "Standard"
+ - title: "GCS Staging"
+ additionalProperties: false
+ description: "Writes large batches of records to a file, uploads the file\
+ \ to GCS, then uses
COPY INTO table
to upload the file. Recommended\
+ \ for large production workloads for better speed and scalability."
+ required:
+ - "method"
+ - "gcs_bucket_name"
+ - "gcs_bucket_path"
+ - "credential"
+ properties:
+ method:
+ type: "string"
+ const: "GCS Staging"
+ gcs_bucket_name:
+ title: "GCS Bucket Name"
+ type: "string"
+ description: "The name of the GCS bucket."
+ examples:
+ - "airbyte_sync"
+ gcs_bucket_path:
+ description: "Directory under the GCS bucket where data will be written."
+ type: "string"
+ examples:
+ - "data_sync/test"
+ keep_files_in_gcs-bucket:
+ type: "string"
+ description: "This upload method is supposed to temporary store records\
+ \ in GCS bucket. What do you want to do with data in GCS bucket\
+ \ when migration has finished?"
+ title: "GCS tmp files afterward processing"
+ default: "Delete all tmp files from GCS"
+ enum:
+ - "Delete all tmp files from GCS"
+ - "Keep all tmp files in GCS"
+ credential:
+ title: "Credential"
+ type: "object"
+ oneOf:
+ - title: "HMAC key"
+ required:
+ - "credential_type"
+ - "hmac_key_access_id"
+ - "hmac_key_secret"
+ properties:
+ credential_type:
+ type: "string"
+ const: "HMAC_KEY"
+ hmac_key_access_id:
+ type: "string"
+ description: "HMAC key access ID. When linked to a service account,\
+ \ this ID is 61 characters long; when linked to a user account,\
+ \ it is 24 characters long."
+ title: "HMAC Key Access ID"
+ airbyte_secret: true
+ examples:
+ - "1234567890abcdefghij1234"
+ hmac_key_secret:
+ type: "string"
+ description: "The corresponding secret for the access ID. It\
+ \ is a 40-character base-64 encoded string."
+ title: "HMAC Key Secret"
+ airbyte_secret: true
+ examples:
+ - "1234567890abcdefghij1234567890ABCDEFGHIJ"
+ supportsIncremental: true
+ supportsNormalization: true
+ supportsDBT: true
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+ - "append_dedup"
+- dockerImage: "airbyte/destination-bigquery-denormalized:0.1.8"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "BigQuery Denormalized Typed Struct Destination Spec"
+ type: "object"
+ required:
+ - "project_id"
+ - "dataset_id"
+ additionalProperties: true
+ properties:
+ project_id:
+ type: "string"
+ description: "The GCP project ID for the project containing the target BigQuery\
+ \ dataset."
+ title: "Project ID"
+ dataset_id:
+ type: "string"
+ description: "Default BigQuery Dataset ID tables are replicated to if the\
+ \ source does not specify a namespace."
+ title: "Default Dataset ID"
+ dataset_location:
+ type: "string"
+ description: "The location of the dataset. Warning: Changes made after creation\
+ \ will not be applied."
+ title: "Dataset Location"
+ default: "US"
+ enum:
+ - "US"
+ - "EU"
+ - "asia-east1"
+ - "asia-east2"
+ - "asia-northeast1"
+ - "asia-northeast2"
+ - "asia-northeast3"
+ - "asia-south1"
+ - "asia-southeast1"
+ - "asia-southeast2"
+ - "australia-southeast1"
+ - "europe-central1"
+ - "europe-central2"
+ - "europe-north1"
+ - "europe-west1"
+ - "europe-west2"
+ - "europe-west3"
+ - "europe-west4"
+ - "europe-west5"
+ - "europe-west6"
+ - "northamerica-northeast1"
+ - "southamerica-east1"
+ - "us-central1"
+ - "us-east1"
+ - "us-east4"
+ - "us-west-1"
+ - "us-west-2"
+ - "us-west-3"
+ - "us-west-4"
+ credentials_json:
+ type: "string"
+ description: "The contents of the JSON service account key. Check out the\
+ \ docs if you need help generating this key. Default credentials will\
+ \ be used if this field is left empty."
+ title: "Credentials JSON"
+ airbyte_secret: true
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: true
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-cassandra:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/cassandra"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Cassandra Destination Spec"
+ type: "object"
+ required:
+ - "keyspace"
+ - "username"
+ - "password"
+ - "address"
+ - "port"
+ additionalProperties: true
+ properties:
+ keyspace:
+ title: "Keyspace"
+ description: "Default Cassandra keyspace to create data in."
+ type: "string"
+ order: 0
+ username:
+ title: "Username"
+ description: "Username to use to access Cassandra."
+ type: "string"
+ order: 1
+ password:
+ title: "Password"
+ description: "Password associated with Cassandra."
+ type: "string"
+ airbyte_secret: true
+ order: 2
+ address:
+ title: "Address"
+ description: "Address to connect to."
+ type: "string"
+ examples:
+ - "localhost,127.0.0.1"
+ order: 3
+ port:
+ title: "Port"
+ description: "Port of Cassandra."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 9042
+ order: 4
+ datacenter:
+ title: "Datacenter"
+ description: "Datacenter of the cassandra cluster."
+ type: "string"
+ default: "datacenter1"
+ order: 5
+ replication:
+ title: "Replication factor"
+ type: "integer"
+ description: "Indicates to how many nodes the data should be replicated\
+ \ to."
+ default: 1
+ order: 6
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-keen:0.2.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/keen"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Keen Spec"
+ type: "object"
+ required:
+ - "project_id"
+ - "api_key"
+ additionalProperties: false
+ properties:
+ project_id:
+ description: "Keen Project ID"
+ type: "string"
+ examples:
+ - "58b4acc22ba938934e888322e"
+ api_key:
+ title: "API Key"
+ description: "Keen Master API key"
+ type: "string"
+ examples:
+ - "ABCDEFGHIJKLMNOPRSTUWXYZ"
+ airbyte_secret: true
+ infer_timestamp:
+ title: "Infer Timestamp"
+ description: "Allow connector to guess keen.timestamp value based on the\
+ \ streamed data"
+ type: "boolean"
+ default: true
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-dynamodb:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/dynamodb"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "DynamoDB Destination Spec"
+ type: "object"
+ required:
+ - "dynamodb_table_name"
+ - "dynamodb_region"
+ - "access_key_id"
+ - "secret_access_key"
+ additionalProperties: false
+ properties:
+ dynamodb_endpoint:
+ title: "Endpoint"
+ type: "string"
+ default: ""
+ description: "This is your DynamoDB endpoint url.(if you are working with\
+ \ AWS DynamoDB, just leave empty)."
+ examples:
+ - "http://localhost:9000"
+ dynamodb_table_name:
+ title: "DynamoDB Table Name"
+ type: "string"
+ description: "The name of the DynamoDB table."
+ examples:
+ - "airbyte_sync"
+ dynamodb_region:
+ title: "DynamoDB Region"
+ type: "string"
+ default: ""
+ description: "The region of the DynamoDB."
+ enum:
+ - ""
+ - "us-east-1"
+ - "us-east-2"
+ - "us-west-1"
+ - "us-west-2"
+ - "af-south-1"
+ - "ap-east-1"
+ - "ap-south-1"
+ - "ap-northeast-1"
+ - "ap-northeast-2"
+ - "ap-northeast-3"
+ - "ap-southeast-1"
+ - "ap-southeast-2"
+ - "ca-central-1"
+ - "cn-north-1"
+ - "cn-northwest-1"
+ - "eu-central-1"
+ - "eu-north-1"
+ - "eu-south-1"
+ - "eu-west-1"
+ - "eu-west-2"
+ - "eu-west-3"
+ - "sa-east-1"
+ - "me-south-1"
+ - "us-gov-east-1"
+ - "us-gov-west-1"
+ access_key_id:
+ type: "string"
+ description: "The access key id to access the DynamoDB. Airbyte requires\
+ \ Read and Write permissions to the DynamoDB."
+ title: "DynamoDB Key Id"
+ airbyte_secret: true
+ examples:
+ - "A012345678910EXAMPLE"
+ secret_access_key:
+ type: "string"
+ description: "The corresponding secret to the access key id."
+ title: "DynamoDB Access Key"
+ airbyte_secret: true
+ examples:
+ - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-elasticsearch:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/elasticsearch"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Elasticsearch Connection Configuration"
+ type: "object"
+ required:
+ - "endpoint"
+ additionalProperties: false
+ properties:
+ endpoint:
+ title: "Server Endpoint"
+ type: "string"
+ description: "The full url of the Elasticsearch server"
+ upsert:
+ type: "boolean"
+ title: "Upsert Records"
+ description: "If a primary key identifier is defined in the source, an upsert\
+ \ will be performed using the primary key value as the elasticsearch doc\
+ \ id. Does not support composite primary keys."
+ default: true
+ authenticationMethod:
+ title: "Authentication Method"
+ type: "object"
+ description: "The type of authentication to be used"
+ oneOf:
+ - title: "None"
+ additionalProperties: false
+ description: "No authentication will be used"
+ required:
+ - "method"
+ properties:
+ method:
+ type: "string"
+ const: "none"
+ - title: "Api Key/Secret"
+ additionalProperties: false
+ description: "Use a api key and secret combination to authenticate"
+ required:
+ - "method"
+ - "apiKeyId"
+ - "apiKeySecret"
+ properties:
+ method:
+ type: "string"
+ const: "secret"
+ apiKeyId:
+ title: "API Key ID"
+ description: "The Key ID to used when accessing an enterprise Elasticsearch\
+ \ instance."
+ type: "string"
+ apiKeySecret:
+ title: "API Key Secret"
+ description: "The secret associated with the API Key ID."
+ type: "string"
+ airbyte_secret: true
+ - title: "Username/Password"
+ additionalProperties: false
+ description: "Basic auth header with a username and password"
+ required:
+ - "method"
+ - "username"
+ - "password"
+ properties:
+ method:
+ type: "string"
+ const: "basic"
+ username:
+ title: "Username"
+ description: "Basic auth username to access a secure Elasticsearch\
+ \ server"
+ type: "string"
+ password:
+ title: "Password"
+ description: "Basic auth password to access a secure Elasticsearch\
+ \ server"
+ type: "string"
+ airbyte_secret: true
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+ supportsNamespaces: true
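+# Illustrative only: the authenticationMethod oneOf above is selected via the
+# "method" constant; a basic-auth config might look like this (invented values):
+#   {
+#     "endpoint": "https://elastic.example.com:9200",
+#     "upsert": true,
+#     "authenticationMethod": {
+#       "method": "basic",
+#       "username": "elastic",
+#       "password": "secret"
+#     }
+#   }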
+- dockerImage: "airbyte/destination-gcs:0.1.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/gcs"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "GCS Destination Spec"
+ type: "object"
+ required:
+ - "gcs_bucket_name"
+ - "gcs_bucket_path"
+ - "gcs_bucket_region"
+ - "credential"
+ - "format"
+ additionalProperties: false
+ properties:
+ gcs_bucket_name:
+ title: "GCS Bucket Name"
+ type: "string"
+ description: "The name of the GCS bucket."
+ examples:
+ - "airbyte_sync"
+ gcs_bucket_path:
+ description: "Directory under the GCS bucket where data will be written."
+ type: "string"
+ examples:
+ - "data_sync/test"
+ gcs_bucket_region:
+ title: "GCS Bucket Region"
+ type: "string"
+ default: ""
+ description: "The region of the GCS bucket."
+ enum:
+ - ""
+ - "-- North America --"
+ - "northamerica-northeast1"
+ - "us-central1"
+ - "us-east1"
+ - "us-east4"
+ - "us-west1"
+ - "us-west2"
+ - "us-west3"
+ - "us-west4"
+ - "-- South America --"
+ - "southamerica-east1"
+ - "-- Europe --"
+ - "europe-central2"
+ - "europe-north1"
+ - "europe-west1"
+ - "europe-west2"
+ - "europe-west3"
+ - "europe-west4"
+ - "europe-west6"
+ - "-- Asia --"
+ - "asia-east1"
+ - "asia-east2"
+ - "asia-northeast1"
+ - "asia-northeast2"
+ - "asia-northeast3"
+ - "asia-south1"
+ - "asia-south2"
+ - "asia-southeast1"
+ - "asia-southeast2"
+ - "-- Australia --"
+ - "australia-southeast1"
+ - "australia-southeast2"
+ - "-- Multi-regions --"
+ - "asia"
+ - "eu"
+ - "us"
+ - "-- Dual-regions --"
+ - "asia1"
+ - "eur4"
+ - "nam4"
+ credential:
+ title: "Credential"
+ type: "object"
+ oneOf:
+ - title: "HMAC key"
+ required:
+ - "credential_type"
+ - "hmac_key_access_id"
+ - "hmac_key_secret"
+ properties:
+ credential_type:
+ type: "string"
+ enum:
+ - "HMAC_KEY"
+ default: "HMAC_KEY"
+ hmac_key_access_id:
+ type: "string"
+ description: "HMAC key access ID. When linked to a service account,\
+ \ this ID is 61 characters long; when linked to a user account,\
+ \ it is 24 characters long."
+ title: "HMAC Key Access ID"
+ airbyte_secret: true
+ examples:
+ - "1234567890abcdefghij1234"
+ hmac_key_secret:
+ type: "string"
+ description: "The corresponding secret for the access ID. It is a\
+ \ 40-character base-64 encoded string."
+ title: "HMAC Key Secret"
+ airbyte_secret: true
+ examples:
+ - "1234567890abcdefghij1234567890ABCDEFGHIJ"
+ format:
+ title: "Output Format"
+ type: "object"
+ description: "Output data format"
+ oneOf:
+ - title: "Avro: Apache Avro"
+ required:
+ - "format_type"
+ - "compression_codec"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "Avro"
+ default: "Avro"
+ compression_codec:
+ title: "Compression Codec"
+ description: "The compression algorithm used to compress data. Default\
+ \ to no compression."
+ type: "object"
+ oneOf:
+ - title: "no compression"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "no compression"
+ default: "no compression"
+ - title: "Deflate"
+ required:
+ - "codec"
+ - "compression_level"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "Deflate"
+ default: "Deflate"
+ compression_level:
+ title: "Deflate level"
+ description: "0: no compression & fastest, 9: best compression\
+ \ & slowest."
+ type: "integer"
+ default: 0
+ minimum: 0
+ maximum: 9
+ - title: "bzip2"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "bzip2"
+ default: "bzip2"
+ - title: "xz"
+ required:
+ - "codec"
+ - "compression_level"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "xz"
+ default: "xz"
+ compression_level:
+ title: "Compression level"
+ description: "See here for details."
+ type: "integer"
+ default: 6
+ minimum: 0
+ maximum: 9
+ - title: "zstandard"
+ required:
+ - "codec"
+ - "compression_level"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "zstandard"
+ default: "zstandard"
+ compression_level:
+ title: "Compression level"
+ description: "Negative levels are 'fast' modes akin to lz4 or\
+ \ snappy, levels above 9 are generally for archival purposes,\
+ \ and levels above 18 use a lot of memory."
+ type: "integer"
+ default: 3
+ minimum: -5
+ maximum: 22
+ include_checksum:
+ title: "Include checksum"
+ description: "If true, include a checksum with each data block."
+ type: "boolean"
+ default: false
+ - title: "snappy"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "snappy"
+ default: "snappy"
+ part_size_mb:
+ title: "Block Size (MB) for GCS multipart upload"
+ description: "This is the size of a \"Part\" being buffered in memory.\
+ \ It limits the memory usage when writing. Larger values will allow\
+ \ to upload a bigger files and improve the speed, but consumes9\
+ \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB."
+ type: "integer"
+ default: 5
+ examples:
+ - 5
+ - title: "CSV: Comma-Separated Values"
+ required:
+ - "format_type"
+ - "flattening"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "CSV"
+ default: "CSV"
+ flattening:
+ type: "string"
+ title: "Normalization (Flattening)"
+ description: "Whether the input json data should be normalized (flattened)\
+ \ in the output CSV. Please refer to docs for details."
+ default: "No flattening"
+ enum:
+ - "No flattening"
+ - "Root level flattening"
+ part_size_mb:
+ title: "Block Size (MB) for GCS multipart upload"
+ description: "This is the size of a \"Part\" being buffered in memory.\
+ \ It limits the memory usage when writing. Larger values will allow\
+ \ to upload a bigger files and improve the speed, but consumes9\
+ \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB."
+ type: "integer"
+ default: 5
+ examples:
+ - 5
+ - title: "JSON Lines: newline-delimited JSON"
+ required:
+ - "format_type"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "JSONL"
+ default: "JSONL"
+ part_size_mb:
+ title: "Block Size (MB) for GCS multipart upload"
+ description: "This is the size of a \"Part\" being buffered in memory.\
+ \ It limits the memory usage when writing. Larger values will allow\
+ \ to upload a bigger files and improve the speed, but consumes9\
+ \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB."
+ type: "integer"
+ default: 5
+ examples:
+ - 5
+ - title: "Parquet: Columnar Storage"
+ required:
+ - "format_type"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "Parquet"
+ default: "Parquet"
+ compression_codec:
+ title: "Compression Codec"
+ description: "The compression algorithm used to compress data pages."
+ type: "string"
+ enum:
+ - "UNCOMPRESSED"
+ - "SNAPPY"
+ - "GZIP"
+ - "LZO"
+ - "BROTLI"
+ - "LZ4"
+ - "ZSTD"
+ default: "UNCOMPRESSED"
+ block_size_mb:
+ title: "Block Size (Row Group Size) (MB)"
+ description: "This is the size of a row group being buffered in memory.\
+ \ It limits the memory usage when writing. Larger values will improve\
+ \ the IO when reading, but consume more memory when writing. Default:\
+ \ 128 MB."
+ type: "integer"
+ default: 128
+ examples:
+ - 128
+ max_padding_size_mb:
+ title: "Max Padding Size (MB)"
+ description: "Maximum size allowed as padding to align row groups.\
+ \ This is also the minimum size of a row group. Default: 8 MB."
+ type: "integer"
+ default: 8
+ examples:
+ - 8
+ page_size_kb:
+ title: "Page Size (KB)"
+ description: "The page size is for compression. A block is composed\
+ \ of pages. A page is the smallest unit that must be read fully\
+ \ to access a single record. If this value is too small, the compression\
+ \ will deteriorate. Default: 1024 KB."
+ type: "integer"
+ default: 1024
+ examples:
+ - 1024
+ dictionary_page_size_kb:
+ title: "Dictionary Page Size (KB)"
+ description: "There is one dictionary page per column per row group\
+ \ when dictionary encoding is used. The dictionary page size works\
+ \ like the page size but for dictionary. Default: 1024 KB."
+ type: "integer"
+ default: 1024
+ examples:
+ - 1024
+ dictionary_encoding:
+ title: "Dictionary Encoding"
+ description: "Default: true."
+ type: "boolean"
+ default: true
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+ $schema: "http://json-schema.org/draft-07/schema#"
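+# Illustrative only: "credential" and "format" above are both oneOf objects,
+# the latter with a nested compression_codec oneOf; a config selecting HMAC
+# credentials and zstandard-compressed Avro output (invented values):
+#   {
+#     "gcs_bucket_name": "airbyte-sync",
+#     "gcs_bucket_path": "data_sync/test",
+#     "gcs_bucket_region": "us-central1",
+#     "credential": {
+#       "credential_type": "HMAC_KEY",
+#       "hmac_key_access_id": "1234567890abcdefghij1234",
+#       "hmac_key_secret": "1234567890abcdefghij1234567890ABCDEFGHIJ"
+#     },
+#     "format": {
+#       "format_type": "Avro",
+#       "compression_codec": { "codec": "zstandard", "compression_level": 3 },
+#       "part_size_mb": 5
+#     }
+#   }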
+- dockerImage: "airbyte/destination-pubsub:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/pubsub"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Google PubSub Destination Spec"
+ type: "object"
+ required:
+ - "project_id"
+ - "topic_id"
+ - "credentials_json"
+ additionalProperties: true
+ properties:
+ project_id:
+ type: "string"
+ description: "The GCP project ID for the project containing the target PubSub"
+ title: "Project ID"
+ topic_id:
+ type: "string"
+ description: "PubSub topic ID in the given GCP project ID"
+ title: "PubSub Topic ID"
+ credentials_json:
+ type: "string"
+ description: "The contents of the JSON service account key. Check out the\
+ \ docs if you need help generating this key."
+ title: "Credentials JSON"
+ airbyte_secret: true
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "append"
+- dockerImage: "airbyte/destination-kafka:0.1.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/kafka"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Kafka Destination Spec"
+ type: "object"
+ required:
+ - "bootstrap_servers"
+ - "topic_pattern"
+ - "protocol"
+ - "acks"
+ - "enable_idempotence"
+ - "compression_type"
+ - "batch_size"
+ - "linger_ms"
+ - "max_in_flight_requests_per_connection"
+ - "client_dns_lookup"
+ - "buffer_memory"
+ - "max_request_size"
+ - "retries"
+ - "socket_connection_setup_timeout_ms"
+ - "socket_connection_setup_timeout_max_ms"
+ - "max_block_ms"
+ - "request_timeout_ms"
+ - "delivery_timeout_ms"
+ - "send_buffer_bytes"
+ - "receive_buffer_bytes"
+ additionalProperties: true
+ properties:
+ bootstrap_servers:
+ title: "Bootstrap servers"
+ description: "A list of host/port pairs to use for establishing the initial\
+ \ connection to the Kafka cluster. The client will make use of all servers\
+ \ irrespective of which servers are specified here for bootstrapping—this\
+ \ list only impacts the initial hosts used to discover the full set of\
+ \ servers. This list should be in the form host1:port1,host2:port2,....\
+ \ Since these servers are just used for the initial connection to discover\
+ \ the full cluster membership (which may change dynamically), this list\
+ \ need not contain the full set of servers (you may want more than one,\
+ \ though, in case a server is down)."
+ type: "string"
+ examples:
+ - "kafka-broker1:9092,kafka-broker2:9092"
+ topic_pattern:
+ title: "Topic pattern"
+ description: "Topic pattern in which the records will be sent. You can use\
+ \ patterns like '{namespace}' and/or '{stream}' to send the message to\
+ \ a specific topic based on these values. Notice that the topic name will\
+ \ be transformed to a standard naming convention."
+ type: "string"
+ examples:
+ - "sample.topic"
+ - "{namespace}.{stream}.sample"
+ test_topic:
+ title: "Test topic"
+ description: "Topic to test if Airbyte can produce messages."
+ type: "string"
+ examples:
+ - "test.topic"
+ sync_producer:
+ title: "Sync producer"
+ description: "Wait synchronously until the record has been sent to Kafka."
+ type: "boolean"
+ default: false
+ protocol:
+ title: "Protocol"
+ type: "object"
+ description: "Protocol used to communicate with brokers."
+ oneOf:
+ - title: "PLAINTEXT"
+ required:
+ - "security_protocol"
+ properties:
+ security_protocol:
+ type: "string"
+ enum:
+ - "PLAINTEXT"
+ default: "PLAINTEXT"
+ - title: "SASL PLAINTEXT"
+ required:
+ - "security_protocol"
+ - "sasl_mechanism"
+ - "sasl_jaas_config"
+ properties:
+ security_protocol:
+ type: "string"
+ enum:
+ - "SASL_PLAINTEXT"
+ default: "SASL_PLAINTEXT"
+ sasl_mechanism:
+ title: "SASL mechanism"
+ description: "SASL mechanism used for client connections. This may\
+ \ be any mechanism for which a security provider is available."
+ type: "string"
+ default: "PLAIN"
+ enum:
+ - "PLAIN"
+ sasl_jaas_config:
+ title: "SASL JAAS config"
+ description: "JAAS login context parameters for SASL connections in\
+ \ the format used by JAAS configuration files."
+ type: "string"
+ default: ""
+ airbyte_secret: true
+ - title: "SASL SSL"
+ required:
+ - "security_protocol"
+ - "sasl_mechanism"
+ - "sasl_jaas_config"
+ properties:
+ security_protocol:
+ type: "string"
+ enum:
+ - "SASL_SSL"
+ default: "SASL_SSL"
+ sasl_mechanism:
+ title: "SASL mechanism"
+ description: "SASL mechanism used for client connections. This may\
+ \ be any mechanism for which a security provider is available."
+ type: "string"
+ default: "GSSAPI"
+ enum:
+ - "GSSAPI"
+ - "OAUTHBEARER"
+ - "SCRAM-SHA-256"
+ sasl_jaas_config:
+ title: "SASL JAAS config"
+ description: "JAAS login context parameters for SASL connections in\
+ \ the format used by JAAS configuration files."
+ type: "string"
+ default: ""
+ airbyte_secret: true
+ client_id:
+ title: "Client ID"
+ description: "An id string to pass to the server when making requests. The\
+ \ purpose of this is to be able to track the source of requests beyond\
+ \ just ip/port by allowing a logical application name to be included in\
+ \ server-side request logging."
+ type: "string"
+ examples:
+ - "airbyte-producer"
+ acks:
+ title: "ACKs"
+ description: "The number of acknowledgments the producer requires the leader\
+ \ to have received before considering a request complete. This controls\
+ \ the durability of records that are sent."
+ type: "string"
+ default: "1"
+ enum:
+ - "0"
+ - "1"
+ - "all"
+ enable_idempotence:
+ title: "Enable idempotence"
+ description: "When set to 'true', the producer will ensure that exactly\
+ \ one copy of each message is written in the stream. If 'false', producer\
+ \ retries due to broker failures, etc., may write duplicates of the retried\
+ \ message in the stream."
+ type: "boolean"
+ default: false
+ compression_type:
+ title: "Compression type"
+ description: "The compression type for all data generated by the producer."
+ type: "string"
+ default: "none"
+ enum:
+ - "none"
+ - "gzip"
+ - "snappy"
+ - "lz4"
+ - "zstd"
+ batch_size:
+ title: "Batch size"
+ description: "The producer will attempt to batch records together into fewer\
+ \ requests whenever multiple records are being sent to the same partition."
+ type: "integer"
+ examples:
+ - 16384
+ linger_ms:
+ title: "Linger ms"
+ description: "The producer groups together any records that arrive in between\
+ \ request transmissions into a single batched request."
+ type: "string"
+ examples:
+ - 0
+ max_in_flight_requests_per_connection:
+ title: "Max in flight requests per connection"
+ description: "The maximum number of unacknowledged requests the client will\
+ \ send on a single connection before blocking."
+ type: "integer"
+ examples:
+ - 5
+ client_dns_lookup:
+ title: "Client DNS lookup"
+ description: "Controls how the client uses DNS lookups. If set to use_all_dns_ips,\
+ \ connect to each returned IP address in sequence until a successful connection\
+ \ is established. After a disconnection, the next IP is used. Once all\
+ \ IPs have been used once, the client resolves the IP(s) from the hostname\
+ \ again. If set to resolve_canonical_bootstrap_servers_only, resolve each\
+ \ bootstrap address into a list of canonical names. After the bootstrap\
+ \ phase, this behaves the same as use_all_dns_ips. If set to default (deprecated),\
+ \ attempt to connect to the first IP address returned by the lookup, even\
+ \ if the lookup returns multiple IP addresses."
+ type: "string"
+ default: "use_all_dns_ips"
+ enum:
+ - "default"
+ - "use_all_dns_ips"
+ - "resolve_canonical_bootstrap_servers_only"
+ - "use_all_dns_ips"
+ buffer_memory:
+ title: "Buffer memory"
+ description: "The total bytes of memory the producer can use to buffer records\
+ \ waiting to be sent to the server."
+ type: "string"
+        examples:
+        - 33554432
+ max_request_size:
+ title: "Max request size"
+ description: "The maximum size of a request in bytes."
+ type: "integer"
+ examples:
+ - 1048576
+ retries:
+ title: "Retries"
+ description: "Setting a value greater than zero will cause the client to\
+ \ resend any record whose send fails with a potentially transient error."
+ type: "integer"
+ examples:
+ - 2147483647
+ socket_connection_setup_timeout_ms:
+ title: "Socket connection setup timeout"
+ description: "The amount of time the client will wait for the socket connection\
+ \ to be established."
+ type: "string"
+ examples:
+ - 10000
+ socket_connection_setup_timeout_max_ms:
+ title: "Socket connection setup max timeout"
+ description: "The maximum amount of time the client will wait for the socket\
+ \ connection to be established. The connection setup timeout will increase\
+ \ exponentially for each consecutive connection failure up to this maximum."
+ type: "string"
+ examples:
+ - 30000
+ max_block_ms:
+ title: "Max block ms"
+ description: "The configuration controls how long the KafkaProducer's send(),\
+ \ partitionsFor(), initTransactions(), sendOffsetsToTransaction(), commitTransaction()\
+ \ and abortTransaction() methods will block."
+ type: "string"
+ examples:
+ - 60000
+ request_timeout_ms:
+ title: "Request timeout"
+ description: "The configuration controls the maximum amount of time the\
+ \ client will wait for the response of a request. If the response is not\
+ \ received before the timeout elapses the client will resend the request\
+ \ if necessary or fail the request if retries are exhausted."
+ type: "integer"
+ examples:
+ - 30000
+ delivery_timeout_ms:
+ title: "Delivery timeout"
+ description: "An upper bound on the time to report success or failure after\
+ \ a call to 'send()' returns."
+ type: "integer"
+ examples:
+ - 120000
+ send_buffer_bytes:
+ title: "Send buffer bytes"
+ description: "The size of the TCP send buffer (SO_SNDBUF) to use when sending\
+ \ data. If the value is -1, the OS default will be used."
+ type: "integer"
+ examples:
+ - 131072
+ receive_buffer_bytes:
+ title: "Receive buffer bytes"
+ description: "The size of the TCP receive buffer (SO_RCVBUF) to use when\
+ \ reading data. If the value is -1, the OS default will be used."
+ type: "integer"
+ examples:
+ - 32768
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "append"
+- dockerImage: "airbyte/destination-csv:0.2.8"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/local-csv"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "CSV Destination Spec"
+ type: "object"
+ required:
+ - "destination_path"
+ additionalProperties: false
+ properties:
+ destination_path:
+ description: "Path to the directory where csv files will be written. The\
+ \ destination uses the local mount \"/local\" and any data files will\
+ \ be placed inside that local mount. For more information check out our\
+ \ docs"
+ type: "string"
+ examples:
+ - "/local"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-local-json:0.2.8"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/local-json"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Local Json Destination Spec"
+ type: "object"
+ required:
+ - "destination_path"
+ additionalProperties: false
+ properties:
+ destination_path:
+ description: "Path to the directory where json files will be written. The\
+ \ files will be placed inside that local mount. For more information check\
+ \ out our docs"
+ type: "string"
+ examples:
+ - "/json_data"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-mssql:0.1.10"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/mssql"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "MS SQL Server Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "username"
+ - "database"
+ - "schema"
+ additionalProperties: true
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ order: 0
+ port:
+ title: "Port"
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 1433
+ examples:
+ - "1433"
+ order: 1
+ database:
+ title: "DB Name"
+ description: "Name of the database."
+ type: "string"
+ order: 2
+ schema:
+ title: "Default Schema"
+ description: "The default schema tables are written to if the source does\
+ \ not specify a namespace. The usual value for this field is \"public\"\
+ ."
+ type: "string"
+ examples:
+ - "public"
+ default: "public"
+ order: 3
+ username:
+ title: "User"
+ description: "Username to use to access the database."
+ type: "string"
+ order: 4
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 5
+ ssl_method:
+ title: "SSL Method"
+ type: "object"
+ description: "Encryption method to use when communicating with the database"
+ order: 6
+ oneOf:
+ - title: "Unencrypted"
+ additionalProperties: false
+ description: "Data transfer will not be encrypted."
+ required:
+ - "ssl_method"
+ type: "object"
+ properties:
+ ssl_method:
+ type: "string"
+ enum:
+ - "unencrypted"
+ default: "unencrypted"
+ - title: "Encrypted (trust server certificate)"
+ additionalProperties: false
+ description: "Use the cert provided by the server without verification.\
+ \ (For testing purposes only!)"
+ required:
+ - "ssl_method"
+ type: "object"
+ properties:
+ ssl_method:
+ type: "string"
+ enum:
+ - "encrypted_trust_server_certificate"
+ default: "encrypted_trust_server_certificate"
+ - title: "Encrypted (verify certificate)"
+ additionalProperties: false
+ description: "Verify and use the cert provided by the server."
+ required:
+ - "ssl_method"
+ - "trustStoreName"
+ - "trustStorePassword"
+ type: "object"
+ properties:
+ ssl_method:
+ type: "string"
+ enum:
+ - "encrypted_verify_certificate"
+ default: "encrypted_verify_certificate"
+ hostNameInCertificate:
+ title: "Host Name In Certificate"
+ type: "string"
+ description: "Specifies the host name of the server. The value of\
+ \ this property must match the subject property of the certificate."
+ order: 7
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsIncremental: true
+ supportsNormalization: true
+ supportsDBT: true
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+ - "append_dedup"
+- dockerImage: "airbyte/destination-meilisearch:0.2.10"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/meilisearch"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "MeiliSearch Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ additionalProperties: true
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the MeiliSearch instance"
+ type: "string"
+ order: 0
+ api_key:
+ title: "API Key"
+ airbyte_secret: true
+ description: "MeiliSearch instance API Key"
+ type: "string"
+ order: 1
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-mongodb:0.1.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/mongodb"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "MongoDB Destination Spec"
+ type: "object"
+ required:
+ - "database"
+ - "auth_type"
+ additionalProperties: true
+ properties:
+ instance_type:
+ description: "MongoDb instance to connect to. For MongoDB Atlas and Replica\
+ \ Set TLS connection is used by default."
+ title: "MongoDb instance type"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "Standalone MongoDb Instance"
+ required:
+ - "instance"
+ - "host"
+ - "port"
+ properties:
+ instance:
+ type: "string"
+ enum:
+ - "standalone"
+ default: "standalone"
+ host:
+ title: "Host"
+ type: "string"
+ description: "Host of a Mongo database to be replicated."
+ order: 0
+ port:
+ title: "Port"
+ type: "integer"
+ description: "Port of a Mongo database to be replicated."
+ minimum: 0
+ maximum: 65536
+ default: 27017
+ examples:
+ - "27017"
+ order: 1
+ tls:
+ title: "TLS connection"
+ type: "boolean"
+ description: "Indicates whether TLS encryption protocol will be used\
+ \ to connect to MongoDB. It is recommended to use TLS connection\
+ \ if possible. For more information see documentation."
+ default: false
+ order: 2
+ - title: "Replica Set"
+ required:
+ - "instance"
+ - "server_addresses"
+ properties:
+ instance:
+ type: "string"
+ enum:
+ - "replica"
+ default: "replica"
+ server_addresses:
+ title: "Server addresses"
+ type: "string"
+ description: "The members of a replica set. Please specify `host`:`port`\
+ \ of each member seperated by comma."
+ examples:
+ - "host1:27017,host2:27017,host3:27017"
+ order: 0
+ replica_set:
+ title: "Replica Set"
+ type: "string"
+ description: "A replica set name."
+ order: 1
+ - title: "MongoDB Atlas"
+ additionalProperties: false
+ required:
+ - "instance"
+ - "cluster_url"
+ properties:
+ instance:
+ type: "string"
+ enum:
+ - "atlas"
+ default: "atlas"
+ cluster_url:
+ title: "Cluster URL"
+ type: "string"
+ description: "URL of a cluster to connect to."
+ order: 0
+ database:
+ title: "DB Name"
+ description: "Name of the database."
+ type: "string"
+ order: 2
+ auth_type:
+ title: "Authorization type"
+ type: "object"
+ description: "Authorization type."
+ oneOf:
+ - title: "None"
+ additionalProperties: false
+ description: "None."
+ required:
+ - "authorization"
+ type: "object"
+ properties:
+ authorization:
+ type: "string"
+ const: "none"
+ - title: "Login/Password"
+ additionalProperties: false
+ description: "Login/Password."
+ required:
+ - "authorization"
+ - "username"
+ - "password"
+ type: "object"
+ properties:
+ authorization:
+ type: "string"
+ const: "login/password"
+ username:
+ title: "User"
+ description: "Username to use to access the database."
+ type: "string"
+ order: 1
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 2
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-mysql:0.1.13"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/mysql"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "MySQL Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "username"
+ - "database"
+ additionalProperties: true
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ order: 0
+ port:
+ title: "Port"
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 3306
+ examples:
+ - "3306"
+ order: 1
+ database:
+ title: "DB Name"
+ description: "Name of the database."
+ type: "string"
+ order: 2
+ username:
+ title: "User"
+ description: "Username to use to access the database."
+ type: "string"
+ order: 3
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ ssl:
+ title: "SSL Connection"
+ description: "Encrypt data using SSL."
+ type: "boolean"
+ default: true
+ order: 5
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials for logging\
+ \ into the jump server host."
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsIncremental: true
+ supportsNormalization: true
+ supportsDBT: true
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-oracle:0.1.11"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/oracle"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Oracle Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "username"
+ - "sid"
+ additionalProperties: true
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ order: 0
+ port:
+ title: "Port"
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 1521
+ examples:
+ - "1521"
+ order: 1
+ sid:
+ title: "SID"
+ description: "SID"
+ type: "string"
+ order: 2
+ username:
+ title: "User"
+ description: "Username to use to access the database. This user must have\
+ \ CREATE USER privileges in the database."
+ type: "string"
+ order: 3
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ schema:
+ title: "Default Schema"
+ description: "The default schema tables are written to if the source does\
+ \ not specify a namespace. The usual value for this field is \"airbyte\"\
+ . In Oracle, schemas and users are the same thing, so the \"user\" parameter\
+ \ is used as the login credentials and this is used for the default Airbyte\
+ \ message schema."
+ type: "string"
+ examples:
+ - "airbyte"
+ default: "airbyte"
+ order: 5
+ encryption:
+ title: "Encryption"
+ type: "object"
+ description: "Encryption method to use when communicating with the database"
+ order: 6
+ oneOf:
+ - title: "Unencrypted"
+ additionalProperties: false
+ description: "Data transfer will not be encrypted."
+ required:
+ - "encryption_method"
+ properties:
+ encryption_method:
+ type: "string"
+ const: "unencrypted"
+ enum:
+ - "unencrypted"
+ default: "unencrypted"
+ - title: "Native Network Ecryption (NNE)"
+ additionalProperties: false
+ description: "Native network encryption gives you the ability to encrypt\
+ \ database connections, without the configuration overhead of TCP/IP\
+ \ and SSL/TLS and without the need to open and listen on different ports."
+ required:
+ - "encryption_method"
+ properties:
+ encryption_method:
+ type: "string"
+ const: "client_nne"
+ enum:
+ - "client_nne"
+ default: "client_nne"
+ encryption_algorithm:
+ type: "string"
+ description: "This parameter defines the encryption algorithm to be\
+ \ used"
+ title: "Encryption Algorithm"
+ default: "AES256"
+ enum:
+ - "AES256"
+ - "RC4_56"
+ - "3DES168"
+ - title: "TLS Encrypted (verify certificate)"
+ additionalProperties: false
+ description: "Verify and use the cert provided by the server."
+ required:
+ - "encryption_method"
+ - "ssl_certificate"
+ properties:
+ encryption_method:
+ type: "string"
+ const: "encrypted_verify_certificate"
+ enum:
+ - "encrypted_verify_certificate"
+ default: "encrypted_verify_certificate"
+ ssl_certificate:
+ title: "SSL PEM file"
+ description: "Privacy Enhanced Mail (PEM) files are concatenated certificate\
+ \ containers frequently used in certificate installations"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: true
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-postgres:0.3.11"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/postgres"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Postgres Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "username"
+ - "database"
+ - "schema"
+ additionalProperties: true
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ order: 0
+ port:
+ title: "Port"
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 5432
+ examples:
+ - "5432"
+ order: 1
+ database:
+ title: "DB Name"
+ description: "Name of the database."
+ type: "string"
+ order: 2
+ schema:
+ title: "Default Schema"
+ description: "The default schema tables are written to if the source does\
+ \ not specify a namespace. The usual value for this field is \"public\"\
+ ."
+ type: "string"
+ examples:
+ - "public"
+ default: "public"
+ order: 3
+ username:
+ title: "User"
+ description: "Username to use to access the database."
+ type: "string"
+ order: 4
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 5
+ ssl:
+ title: "SSL Connection"
+ description: "Encrypt data using SSL."
+ type: "boolean"
+ default: false
+ order: 6
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials for logging\
+ \ into the jump server host."
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsIncremental: true
+ supportsNormalization: true
+ supportsDBT: true
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+ - "append_dedup"
+- dockerImage: "airbyte/destination-pulsar:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/pulsar"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Pulsar Destination Spec"
+ type: "object"
+ required:
+ - "brokers"
+ - "use_tls"
+ - "topic_type"
+ - "topic_tenant"
+ - "topic_namespace"
+ - "topic_pattern"
+ - "compression_type"
+ - "send_timeout_ms"
+ - "max_pending_messages"
+ - "max_pending_messages_across_partitions"
+ - "batching_enabled"
+ - "batching_max_messages"
+ - "batching_max_publish_delay"
+ - "block_if_queue_full"
+ additionalProperties: true
+ properties:
+ brokers:
+ title: "Pulsar brokers"
+ description: "A list of host/port pairs to use for establishing the initial\
+ \ connection to the Pulsar cluster."
+ type: "string"
+ examples:
+ - "broker1:6650,broker2:6650"
+ use_tls:
+ title: "Use TLS"
+ description: "Whether to use TLS encryption on the connection."
+ type: "boolean"
+ default: false
+ topic_type:
+ title: "Topic type"
+ description: "It identifies type of topic. Pulsar supports two kind of topics:\
+ \ persistent and non-persistent. In persistent topic, all messages are\
+ \ durably persisted on disk (that means on multiple disks unless the broker\
+ \ is standalone), whereas non-persistent topic does not persist message\
+ \ into storage disk."
+ type: "string"
+ default: "persistent"
+ enum:
+ - "persistent"
+ - "non-persistent"
+ topic_tenant:
+ title: "Topic tenant"
+ description: "The topic tenant within the instance. Tenants are essential\
+ \ to multi-tenancy in Pulsar, and spread across clusters."
+ type: "string"
+ default: "public"
+ examples:
+ - "public"
+ topic_namespace:
+ title: "Topic namespace"
+ description: "The administrative unit of the topic, which acts as a grouping\
+ \ mechanism for related topics. Most topic configuration is performed\
+ \ at the namespace level. Each tenant has one or multiple namespaces."
+ type: "string"
+ default: "default"
+ examples:
+ - "default"
+ topic_pattern:
+ title: "Topic pattern"
+ description: "Topic pattern in which the records will be sent. You can use\
+ \ patterns like '{namespace}' and/or '{stream}' to send the message to\
+ \ a specific topic based on these values. Notice that the topic name will\
+ \ be transformed to a standard naming convention."
+ type: "string"
+ examples:
+ - "sample.topic"
+ - "{namespace}.{stream}.sample"
+ topic_test:
+ title: "Test topic"
+ description: "Topic to test if Airbyte can produce messages."
+ type: "string"
+ examples:
+ - "test.topic"
+ producer_name:
+ title: "Producer name"
+ description: "Name for the producer. If not filled, the system will generate\
+ \ a globally unique name which can be accessed with."
+ type: "string"
+ examples:
+ - "airbyte-producer"
+ producer_sync:
+ title: "Sync producer"
+ description: "Wait synchronously until the record has been sent to Pulsar."
+ type: "boolean"
+ default: false
+ compression_type:
+ title: "Compression type"
+ description: "Compression type for the producer."
+ type: "string"
+ default: "NONE"
+ enum:
+ - "NONE"
+ - "LZ4"
+ - "ZLIB"
+ - "ZSTD"
+ - "SNAPPY"
+ send_timeout_ms:
+ title: "Message send timeout"
+ description: "If a message is not acknowledged by a server before the send-timeout\
+ \ expires, an error occurs (in ms)."
+ type: "integer"
+ default: 30000
+ max_pending_messages:
+ title: "Max pending messages"
+ description: "The maximum size of a queue holding pending messages."
+ type: "integer"
+ default: 1000
+ max_pending_messages_across_partitions:
+ title: "Max pending messages across partitions"
+ description: "The maximum number of pending messages across partitions."
+ type: "integer"
+ default: 50000
+ batching_enabled:
+ title: "Enable batching"
+ description: "Control whether automatic batching of messages is enabled\
+ \ for the producer."
+ type: "boolean"
+ default: true
+ batching_max_messages:
+ title: "Batching max messages"
+ description: "Maximum number of messages permitted in a batch."
+ type: "integer"
+ default: 1000
+ batching_max_publish_delay:
+ title: "Batching max publish delay"
+ description: " Time period in milliseconds within which the messages sent\
+ \ will be batched."
+ type: "integer"
+ default: 1
+ block_if_queue_full:
+ title: "Block if queue is full"
+ description: "If the send operation should block when the outgoing message\
+ \ queue is full."
+ type: "boolean"
+ default: false
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "append"
+- dockerImage: "airbyte/destination-redshift:0.3.19"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/redshift"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Redshift Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "database"
+ - "username"
+ - "password"
+ - "schema"
+ additionalProperties: true
+ properties:
+ host:
+ description: "Host Endpoint of the Redshift Cluster (must include the cluster-id,\
+ \ region and end with .redshift.amazonaws.com)"
+ type: "string"
+ title: "Host"
+ port:
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 5439
+ examples:
+ - "5439"
+ title: "Port"
+ username:
+ description: "Username to use to access the database."
+ type: "string"
+ title: "Username"
+ password:
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ title: "Password"
+ database:
+ description: "Name of the database."
+ type: "string"
+ title: "Database"
+ schema:
+ description: "The default schema tables are written to if the source does\
+ \ not specify a namespace. Unless specifically configured, the usual value\
+ \ for this field is \"public\"."
+ type: "string"
+ examples:
+ - "public"
+ default: "public"
+ title: "Default Schema"
+ s3_bucket_name:
+ title: "S3 Bucket Name"
+ type: "string"
+ description: "The name of the staging S3 bucket to use if utilising a COPY\
+ \ strategy. COPY is recommended for production workloads for better speed\
+ \ and scalability. See AWS docs for more details."
+ examples:
+ - "airbyte.staging"
+ s3_bucket_region:
+ title: "S3 Bucket Region"
+ type: "string"
+ default: ""
+ description: "The region of the S3 staging bucket to use if utilising a\
+ \ copy strategy."
+ enum:
+ - ""
+ - "us-east-1"
+ - "us-east-2"
+ - "us-west-1"
+ - "us-west-2"
+ - "af-south-1"
+ - "ap-east-1"
+ - "ap-south-1"
+ - "ap-northeast-1"
+ - "ap-northeast-2"
+ - "ap-northeast-3"
+ - "ap-southeast-1"
+ - "ap-southeast-2"
+ - "ca-central-1"
+ - "cn-north-1"
+ - "cn-northwest-1"
+ - "eu-central-1"
+ - "eu-north-1"
+ - "eu-south-1"
+ - "eu-west-1"
+ - "eu-west-2"
+ - "eu-west-3"
+ - "sa-east-1"
+ - "me-south-1"
+ access_key_id:
+ type: "string"
+ description: "The Access Key Id granting allow one to access the above S3\
+ \ staging bucket. Airbyte requires Read and Write permissions to the given\
+ \ bucket."
+ title: "S3 Key Id"
+ airbyte_secret: true
+ secret_access_key:
+ type: "string"
+ description: "The corresponding secret to the above access key id."
+ title: "S3 Access Key"
+ airbyte_secret: true
+ part_size:
+ type: "integer"
+ minimum: 10
+ maximum: 100
+ examples:
+ - "10"
+ description: "Optional. Increase this if syncing tables larger than 100GB.\
+ \ Only relevant for COPY. Files are streamed to S3 in parts. This determines\
+ \ the size of each part, in MBs. As S3 has a limit of 10,000 parts per\
+ \ file, part size affects the table size. This is 10MB by default, resulting\
+ \ in a default limit of 100GB tables. Note, a larger part size will result\
+ \ in larger memory requirements. A rule of thumb is to multiply the part\
+ \ size by 10 to get the memory requirement. Modify this with care."
+ title: "Stream Part Size"
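+          # A worked example of the rule of thumb above (illustrative figures only,
+          # not part of the generated spec): with the default 10MB part size and
+          # S3's limit of 10,000 parts per file, the maximum table size is
+          # 10MB x 10,000 = 100GB, and the estimated memory requirement is
+          # 10MB x 10 = 100MB per stream.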
+ supportsIncremental: true
+ supportsNormalization: true
+ supportsDBT: true
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+ - "append_dedup"
+- dockerImage: "airbyte/destination-s3:0.1.13"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/s3"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "S3 Destination Spec"
+ type: "object"
+ required:
+ - "s3_bucket_name"
+ - "s3_bucket_path"
+ - "s3_bucket_region"
+ - "access_key_id"
+ - "secret_access_key"
+ - "format"
+ additionalProperties: false
+ properties:
+ s3_endpoint:
+ title: "Endpoint"
+ type: "string"
+ default: ""
+          description: "This is your S3 endpoint URL. If you are working with AWS\
+            \ S3, just leave this empty."
+ examples:
+ - "http://localhost:9000"
+ s3_bucket_name:
+ title: "S3 Bucket Name"
+ type: "string"
+ description: "The name of the S3 bucket."
+ examples:
+ - "airbyte_sync"
+ s3_bucket_path:
+ description: "Directory under the S3 bucket where data will be written."
+ type: "string"
+ examples:
+ - "data_sync/test"
+ s3_bucket_region:
+ title: "S3 Bucket Region"
+ type: "string"
+ default: ""
+ description: "The region of the S3 bucket."
+ enum:
+ - ""
+ - "us-east-1"
+ - "us-east-2"
+ - "us-west-1"
+ - "us-west-2"
+ - "af-south-1"
+ - "ap-east-1"
+ - "ap-south-1"
+ - "ap-northeast-1"
+ - "ap-northeast-2"
+ - "ap-northeast-3"
+ - "ap-southeast-1"
+ - "ap-southeast-2"
+ - "ca-central-1"
+ - "cn-north-1"
+ - "cn-northwest-1"
+ - "eu-central-1"
+ - "eu-north-1"
+ - "eu-south-1"
+ - "eu-west-1"
+ - "eu-west-2"
+ - "eu-west-3"
+ - "sa-east-1"
+ - "me-south-1"
+ - "us-gov-east-1"
+ - "us-gov-west-1"
+ access_key_id:
+ type: "string"
+ description: "The access key id to access the S3 bucket. Airbyte requires\
+ \ Read and Write permissions to the given bucket."
+ title: "S3 Key Id"
+ airbyte_secret: true
+ examples:
+ - "A012345678910EXAMPLE"
+ secret_access_key:
+ type: "string"
+ description: "The corresponding secret to the access key id."
+ title: "S3 Access Key"
+ airbyte_secret: true
+ examples:
+ - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
+ format:
+ title: "Output Format"
+ type: "object"
+ description: "Output data format"
+ oneOf:
+ - title: "Avro: Apache Avro"
+ required:
+ - "format_type"
+ - "compression_codec"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "Avro"
+ default: "Avro"
+ compression_codec:
+ title: "Compression Codec"
+ description: "The compression algorithm used to compress data. Default\
+ \ to no compression."
+ type: "object"
+ oneOf:
+ - title: "no compression"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "no compression"
+ default: "no compression"
+ - title: "Deflate"
+ required:
+ - "codec"
+ - "compression_level"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "Deflate"
+ default: "Deflate"
+ compression_level:
+ title: "Deflate level"
+ description: "0: no compression & fastest, 9: best compression\
+ \ & slowest."
+ type: "integer"
+ default: 0
+ minimum: 0
+ maximum: 9
+ - title: "bzip2"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "bzip2"
+ default: "bzip2"
+ - title: "xz"
+ required:
+ - "codec"
+ - "compression_level"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "xz"
+ default: "xz"
+ compression_level:
+ title: "Compression level"
+ description: "See here for details."
+ type: "integer"
+ default: 6
+ minimum: 0
+ maximum: 9
+ - title: "zstandard"
+ required:
+ - "codec"
+ - "compression_level"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "zstandard"
+ default: "zstandard"
+ compression_level:
+ title: "Compression level"
+ description: "Negative levels are 'fast' modes akin to lz4 or\
+ \ snappy, levels above 9 are generally for archival purposes,\
+ \ and levels above 18 use a lot of memory."
+ type: "integer"
+ default: 3
+ minimum: -5
+ maximum: 22
+ include_checksum:
+ title: "Include checksum"
+ description: "If true, include a checksum with each data block."
+ type: "boolean"
+ default: false
+ - title: "snappy"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "snappy"
+ default: "snappy"
+ part_size_mb:
+ title: "Block Size (MB) for Amazon S3 multipart upload"
+                description: "This is the size of a \"Part\" being buffered in memory.\
+                  \ It limits the memory usage when writing. Larger values allow\
+                  \ uploading bigger files and improve speed, but consume more memory.\
+                  \ Allowed values: min=5MB, max=525MB. Default: 5MB."
+ type: "integer"
+ default: 5
+ examples:
+ - 5
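+          # Illustrative user config for this Avro option (hypothetical values that
+          # satisfy the schema above; not part of the generated spec):
+          #   format:
+          #     format_type: "Avro"
+          #     compression_codec:
+          #       codec: "zstandard"
+          #       compression_level: 3
+          #       include_checksum: false
+          #     part_size_mb: 5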
+ - title: "CSV: Comma-Separated Values"
+ required:
+ - "format_type"
+ - "flattening"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "CSV"
+ default: "CSV"
+ flattening:
+ type: "string"
+ title: "Normalization (Flattening)"
+ description: "Whether the input json data should be normalized (flattened)\
+ \ in the output CSV. Please refer to docs for details."
+ default: "No flattening"
+ enum:
+ - "No flattening"
+ - "Root level flattening"
+ part_size_mb:
+ title: "Block Size (MB) for Amazon S3 multipart upload"
+                description: "This is the size of a \"Part\" being buffered in memory.\
+                  \ It limits the memory usage when writing. Larger values allow\
+                  \ uploading bigger files and improve speed, but consume more memory.\
+                  \ Allowed values: min=5MB, max=525MB. Default: 5MB."
+ type: "integer"
+ default: 5
+ examples:
+ - 5
+ - title: "JSON Lines: newline-delimited JSON"
+ required:
+ - "format_type"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "JSONL"
+ default: "JSONL"
+ part_size_mb:
+ title: "Block Size (MB) for Amazon S3 multipart upload"
+                description: "This is the size of a \"Part\" being buffered in memory.\
+                  \ It limits the memory usage when writing. Larger values allow\
+                  \ uploading bigger files and improve speed, but consume more memory.\
+                  \ Allowed values: min=5MB, max=525MB. Default: 5MB."
+ type: "integer"
+ default: 5
+ examples:
+ - 5
+ - title: "Parquet: Columnar Storage"
+ required:
+ - "format_type"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "Parquet"
+ default: "Parquet"
+ compression_codec:
+ title: "Compression Codec"
+ description: "The compression algorithm used to compress data pages."
+ type: "string"
+ enum:
+ - "UNCOMPRESSED"
+ - "SNAPPY"
+ - "GZIP"
+ - "LZO"
+ - "BROTLI"
+ - "LZ4"
+ - "ZSTD"
+ default: "UNCOMPRESSED"
+ block_size_mb:
+ title: "Block Size (Row Group Size) (MB)"
+ description: "This is the size of a row group being buffered in memory.\
+ \ It limits the memory usage when writing. Larger values will improve\
+ \ the IO when reading, but consume more memory when writing. Default:\
+ \ 128 MB."
+ type: "integer"
+ default: 128
+ examples:
+ - 128
+ max_padding_size_mb:
+ title: "Max Padding Size (MB)"
+ description: "Maximum size allowed as padding to align row groups.\
+ \ This is also the minimum size of a row group. Default: 8 MB."
+ type: "integer"
+ default: 8
+ examples:
+ - 8
+ page_size_kb:
+ title: "Page Size (KB)"
+ description: "The page size is for compression. A block is composed\
+ \ of pages. A page is the smallest unit that must be read fully\
+ \ to access a single record. If this value is too small, the compression\
+ \ will deteriorate. Default: 1024 KB."
+ type: "integer"
+ default: 1024
+ examples:
+ - 1024
+ dictionary_page_size_kb:
+ title: "Dictionary Page Size (KB)"
+ description: "There is one dictionary page per column per row group\
+ \ when dictionary encoding is used. The dictionary page size works\
+ \ like the page size but for dictionary. Default: 1024 KB."
+ type: "integer"
+ default: 1024
+ examples:
+ - 1024
+ dictionary_encoding:
+ title: "Dictionary Encoding"
+ description: "Default: true."
+ type: "boolean"
+ default: true
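+          # Illustrative user config for this Parquet option (hypothetical values
+          # that satisfy the schema above; not part of the generated spec):
+          #   format:
+          #     format_type: "Parquet"
+          #     compression_codec: "SNAPPY"
+          #     block_size_mb: 128
+          #     page_size_kb: 1024
+          #     dictionary_encoding: true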
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-snowflake:0.3.16"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/snowflake"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Snowflake Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "role"
+ - "warehouse"
+ - "database"
+ - "schema"
+ - "username"
+ - "password"
+ additionalProperties: true
+ properties:
+ host:
+ description: "Host domain of the snowflake instance (must include the account,\
+ \ region, cloud environment, and end with snowflakecomputing.com)."
+ examples:
+ - "accountname.us-east-2.aws.snowflakecomputing.com"
+ type: "string"
+ title: "Host"
+ order: 0
+ role:
+ description: "The role you created for Airbyte to access Snowflake."
+ examples:
+ - "AIRBYTE_ROLE"
+ type: "string"
+ title: "Role"
+ order: 1
+ warehouse:
+ description: "The warehouse you created for Airbyte to sync data into."
+ examples:
+ - "AIRBYTE_WAREHOUSE"
+ type: "string"
+ title: "Warehouse"
+ order: 2
+ database:
+ description: "The database you created for Airbyte to sync data into."
+ examples:
+ - "AIRBYTE_DATABASE"
+ type: "string"
+ title: "Database"
+ order: 3
+ schema:
+ description: "The default Snowflake schema tables are written to if the\
+ \ source does not specify a namespace."
+ examples:
+ - "AIRBYTE_SCHEMA"
+ type: "string"
+ title: "Default Schema"
+ order: 4
+ username:
+ description: "The username you created to allow Airbyte to access the database."
+ examples:
+ - "AIRBYTE_USER"
+ type: "string"
+ title: "Username"
+ order: 5
+ password:
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ title: "Password"
+ order: 6
+ loading_method:
+ type: "object"
+ title: "Loading Method"
+ description: "Loading method used to send data to Snowflake."
+ order: 7
+ oneOf:
+ - title: "Standard Inserts"
+ additionalProperties: false
+          description: "Uses INSERT statements to send batches of records\
+ \ to Snowflake. Easiest (no setup) but not recommended for large production\
+ \ workloads due to slow speed."
+ required:
+ - "method"
+ properties:
+ method:
+ type: "string"
+ enum:
+ - "Standard"
+ default: "Standard"
+ - title: "AWS S3 Staging"
+ additionalProperties: false
+ description: "Writes large batches of records to a file, uploads the file\
+            \ to S3, then uses COPY INTO table to upload the file. Recommended\
+ \ for large production workloads for better speed and scalability."
+ required:
+ - "method"
+ - "s3_bucket_name"
+ - "access_key_id"
+ - "secret_access_key"
+ properties:
+ method:
+ type: "string"
+ enum:
+ - "S3 Staging"
+ default: "S3 Staging"
+ order: 0
+ s3_bucket_name:
+ title: "S3 Bucket Name"
+ type: "string"
+ description: "The name of the staging S3 bucket. Airbyte will write\
+                \ files to this bucket and read them via COPY statements\
+ \ on Snowflake."
+ examples:
+ - "airbyte.staging"
+ order: 1
+ s3_bucket_region:
+ title: "S3 Bucket Region"
+ type: "string"
+ default: ""
+ description: "The region of the S3 staging bucket to use if utilising\
+ \ a copy strategy."
+ enum:
+ - ""
+ - "us-east-1"
+ - "us-east-2"
+ - "us-west-1"
+ - "us-west-2"
+ - "af-south-1"
+ - "ap-east-1"
+ - "ap-south-1"
+ - "ap-northeast-1"
+ - "ap-northeast-2"
+ - "ap-northeast-3"
+ - "ap-southeast-1"
+ - "ap-southeast-2"
+ - "ca-central-1"
+ - "cn-north-1"
+ - "cn-northwest-1"
+ - "eu-central-1"
+ - "eu-west-1"
+ - "eu-west-2"
+ - "eu-west-3"
+ - "eu-south-1"
+ - "eu-north-1"
+ - "sa-east-1"
+ - "me-south-1"
+ order: 2
+ access_key_id:
+ type: "string"
+                description: "The Access Key Id granting access to the above\
+                  \ S3 staging bucket. Airbyte requires Read and Write permissions\
+                  \ to the given bucket."
+ title: "S3 Key Id"
+ airbyte_secret: true
+ order: 3
+ secret_access_key:
+ type: "string"
+ description: "The corresponding secret to the above access key id."
+ title: "S3 Access Key"
+ airbyte_secret: true
+ order: 4
+ - title: "GCS Staging"
+ additionalProperties: false
+ description: "Writes large batches of records to a file, uploads the file\
+            \ to GCS, then uses COPY INTO table to upload the file. Recommended\
+ \ for large production workloads for better speed and scalability."
+ required:
+ - "method"
+ - "project_id"
+ - "bucket_name"
+ - "credentials_json"
+ properties:
+ method:
+ type: "string"
+ enum:
+ - "GCS Staging"
+ default: "GCS Staging"
+ order: 0
+ project_id:
+ title: "GCP Project ID"
+ type: "string"
+ description: "The name of the GCP project ID for your credentials."
+ examples:
+ - "my-project"
+ order: 1
+ bucket_name:
+ title: "GCS Bucket Name"
+ type: "string"
+ description: "The name of the staging GCS bucket. Airbyte will write\
+                \ files to this bucket and read them via COPY statements\
+ \ on Snowflake."
+ examples:
+ - "airbyte-staging"
+ order: 2
+ credentials_json:
+ title: "Google Application Credentials"
+ type: "string"
+ description: "The contents of the JSON key file that has read/write\
+ \ permissions to the staging GCS bucket. You will separately need\
+ \ to grant bucket access to your Snowflake GCP service account.\
+ \ See the GCP docs for more information on how to generate a JSON key\
+ \ for your service account."
+ airbyte_secret: true
+ multiline: true
+ order: 3
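+          # Illustrative user config for the S3 Staging option above (hypothetical
+          # bucket name and region, secrets redacted; not part of the generated spec):
+          #   loading_method:
+          #     method: "S3 Staging"
+          #     s3_bucket_name: "airbyte.staging"
+          #     s3_bucket_region: "us-east-2"
+          #     access_key_id: "<AWS access key id>"
+          #     secret_access_key: "<AWS secret access key>"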
+ supportsIncremental: true
+ supportsNormalization: true
+ supportsDBT: true
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+ - "append_dedup"
diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
index 3e91d0e60c8bb..4597a82ddecbc 100644
--- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
@@ -131,14 +131,14 @@
- name: Facebook Marketing
sourceDefinitionId: e7778cfc-e97c-4458-9ecb-b4f2bba8946c
dockerRepository: airbyte/source-facebook-marketing
- dockerImageTag: 0.2.21
+ dockerImageTag: 0.2.22
documentationUrl: https://docs.airbyte.io/integrations/sources/facebook-marketing
icon: facebook.svg
sourceType: api
- name: Facebook Pages
sourceDefinitionId: 010eb12f-837b-4685-892d-0a39f76a98f5
dockerRepository: airbyte/source-facebook-pages
- dockerImageTag: 0.1.2
+ dockerImageTag: 0.1.3
documentationUrl: https://hub.docker.com/r/airbyte/source-facebook-pages
icon: facebook.svg
sourceType: api
@@ -156,6 +156,12 @@
documentationUrl: https://docs.airbyte.io/integrations/sources/freshdesk
icon: freshdesk.svg
sourceType: api
+- name: Freshsales
+ sourceDefinitionId: eca08d79-7b92-4065-b7f3-79c14836ebe7
+ dockerRepository: airbyte/source-freshsales
+ dockerImageTag: 0.1.0
+ documentationUrl: https://docs.airbyte.io/integrations/sources/freshsales
+ sourceType: api
- name: Freshservice
sourceDefinitionId: 9bb85338-ea95-4c93-b267-6be89125b267
dockerRepository: airbyte/source-freshservice
@@ -192,7 +198,7 @@
- name: Google Directory
sourceDefinitionId: d19ae824-e289-4b14-995a-0632eb46d246
dockerRepository: airbyte/source-google-directory
- dockerImageTag: 0.1.5
+ dockerImageTag: 0.1.8
documentationUrl: https://docs.airbyte.io/integrations/sources/google-directory
sourceType: api
- name: Google Search Console
@@ -217,7 +223,7 @@
- name: Greenhouse
sourceDefinitionId: 59f1e50a-331f-4f09-b3e8-2e8d4d355f44
dockerRepository: airbyte/source-greenhouse
- dockerImageTag: 0.2.5
+ dockerImageTag: 0.2.6
documentationUrl: https://docs.airbyte.io/integrations/sources/greenhouse
icon: greenhouse.svg
sourceType: api
@@ -230,7 +236,7 @@
- name: Hubspot
sourceDefinitionId: 36c891d9-4bd9-43ac-bad2-10e12756272c
dockerRepository: airbyte/source-hubspot
- dockerImageTag: 0.1.21
+ dockerImageTag: 0.1.23
documentationUrl: https://docs.airbyte.io/integrations/sources/hubspot
icon: hubspot.svg
sourceType: api
@@ -256,7 +262,7 @@
- name: Iterable
sourceDefinitionId: 2e875208-0c0b-4ee4-9e92-1cb3156ea799
dockerRepository: airbyte/source-iterable
- dockerImageTag: 0.1.9
+ dockerImageTag: 0.1.11
documentationUrl: https://docs.airbyte.io/integrations/sources/iterable
sourceType: api
- name: Jira
@@ -328,10 +334,16 @@
- name: Mixpanel
sourceDefinitionId: 12928b32-bf0a-4f1e-964f-07e12e37153a
dockerRepository: airbyte/source-mixpanel
- dockerImageTag: 0.1.1
+ dockerImageTag: 0.1.3
documentationUrl: https://docs.airbyte.io/integrations/sources/mixpanel
icon: mixpanel.svg
sourceType: api
+- name: Monday
+ sourceDefinitionId: 80a54ea2-9959-4040-aac1-eee42423ec9b
+ dockerRepository: airbyte/source-monday
+ dockerImageTag: 0.1.0
+ documentationUrl: https://docs.airbyte.io/integrations/sources/monday
+ sourceType: api
- name: MongoDb
sourceDefinitionId: b2e713cd-cc36-4c0a-b5bd-b47cb8a0561e
dockerRepository: airbyte/source-mongodb-v2
@@ -342,14 +354,14 @@
- name: MySQL
sourceDefinitionId: 435bb9a5-7887-4809-aa58-28c27df0d7ad
dockerRepository: airbyte/source-mysql
- dockerImageTag: 0.4.8
+ dockerImageTag: 0.4.9
documentationUrl: https://docs.airbyte.io/integrations/sources/mysql
icon: mysql.svg
sourceType: database
- name: Okta
sourceDefinitionId: 1d4fdb25-64fc-4569-92da-fcdca79a8372
dockerRepository: airbyte/source-okta
- dockerImageTag: 0.1.2
+ dockerImageTag: 0.1.4
documentationUrl: https://docs.airbyte.io/integrations/sources/okta
sourceType: api
- name: OneSignal
@@ -405,7 +417,7 @@
- name: Postgres
sourceDefinitionId: decd338e-5647-4c0b-adf4-da0e75f5a750
dockerRepository: airbyte/source-postgres
- dockerImageTag: 0.3.11
+ dockerImageTag: 0.3.13
documentationUrl: https://docs.airbyte.io/integrations/sources/postgres
icon: postgresql.svg
sourceType: database
@@ -424,7 +436,7 @@
- name: Recharge
sourceDefinitionId: 45d2e135-2ede-49e1-939f-3e3ec357a65e
dockerRepository: airbyte/source-recharge
- dockerImageTag: 0.1.3
+ dockerImageTag: 0.1.4
documentationUrl: https://docs.airbyte.io/integrations/sources/recharge
sourceType: api
- name: Recurly
@@ -462,7 +474,7 @@
- name: Salesforce
sourceDefinitionId: b117307c-14b6-41aa-9422-947e34922962
dockerRepository: airbyte/source-salesforce
- dockerImageTag: 0.1.2
+ dockerImageTag: 0.1.3
documentationUrl: https://docs.airbyte.io/integrations/sources/salesforce
icon: salesforce.svg
sourceType: api
@@ -476,7 +488,7 @@
- name: Shopify
sourceDefinitionId: 9da77001-af33-4bcd-be46-6252bf9342b9
dockerRepository: airbyte/source-shopify
- dockerImageTag: 0.1.21
+ dockerImageTag: 0.1.22
documentationUrl: https://docs.airbyte.io/integrations/sources/shopify
sourceType: api
- name: Short.io
@@ -524,7 +536,7 @@
- name: Stripe
sourceDefinitionId: e094cb9a-26de-4645-8761-65c0c425d1de
dockerRepository: airbyte/source-stripe
- dockerImageTag: 0.1.21
+ dockerImageTag: 0.1.22
documentationUrl: https://docs.airbyte.io/integrations/sources/stripe
icon: stripe.svg
sourceType: api
@@ -586,7 +598,7 @@
- name: Zendesk Support
sourceDefinitionId: 79c1aa37-dae3-42ae-b333-d1c105477715
dockerRepository: airbyte/source-zendesk-support
- dockerImageTag: 0.1.3
+ dockerImageTag: 0.1.4
documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-support
icon: zendesk.svg
sourceType: api
@@ -596,6 +608,11 @@
dockerImageTag: 0.1.2
documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-talk
sourceType: api
+- sourceDefinitionId: cdaf146a-9b75-49fd-9dd2-9d64a0bb4781
+ name: Sentry
+ dockerRepository: airbyte/source-sentry
+ dockerImageTag: 0.1.0
+ documentationUrl: https://docs.airbyte.io/integrations/sources/sentry
- name: Zoom
sourceDefinitionId: aea2fd0d-377d-465e-86c0-4fdc4f688e51
dockerRepository: airbyte/source-zoom-singer
diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml
new file mode 100644
index 0000000000000..e526ee27f5c3f
--- /dev/null
+++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml
@@ -0,0 +1,6016 @@
+# This file is generated by io.airbyte.config.specs.SeedConnectorSpecGenerator.
+# Do NOT edit this file directly. See generator class for more details.
+---
+- dockerImage: "airbyte/source-aws-cloudtrail:0.1.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/aws-cloudtrail"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Aws CloudTrail Spec"
+ type: "object"
+ required:
+ - "aws_key_id"
+ - "aws_secret_key"
+ - "aws_region_name"
+ - "start_date"
+ additionalProperties: true
+ properties:
+ aws_key_id:
+ type: "string"
+ description: "Specifies an AWS access key associated with an IAM user or\
+ \ role."
+ airbyte_secret: true
+ aws_secret_key:
+ type: "string"
+ description: "Specifies the secret key associated with the access key. This\
+ \ is essentially the 'password' for the access key."
+ airbyte_secret: true
+ aws_region_name:
+ type: "string"
+ description: "The default AWS Region to use, for example, us-west-1 or us-west-2.\
+ \ When specifying a Region inline during client initialization, this property\
+ \ is named region_name."
+ start_date:
+ type: "string"
+          description: "The date from which you would like to replicate data. Data\
+            \ in CloudTrail is available for the last 90 days only. Format: YYYY-MM-DD."
+ examples:
+ - "2021-01-01"
+ default: "1970-01-01"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-amazon-ads:0.1.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/amazon-ads"
+ connectionSpecification:
+ title: "Amazon Ads Spec"
+ type: "object"
+ properties:
+ client_id:
+ title: "Client Id"
+          description: "OAuth client ID. See How to create your Login with Amazon."
+ name: "Client ID"
+ type: "string"
+ client_secret:
+ title: "Client Secret"
+          description: "OAuth client secret. See How to create your Login with Amazon."
+ name: "Client secret"
+ airbyte_secret: true
+ type: "string"
+ scope:
+ title: "Scope"
+          description: "By default it is advertising::campaign_management, but customers\
+            \ may need to set the scope to cpc_advertising:campaign_management."
+ default: "advertising::campaign_management"
+ name: "Client scope"
+ examples:
+ - "cpc_advertising:campaign_management"
+ type: "string"
+ refresh_token:
+ title: "Refresh Token"
+          description: "OAuth 2.0 refresh_token. Read the details here."
+ name: "Oauth refresh token"
+ airbyte_secret: true
+ type: "string"
+ start_date:
+ title: "Start Date"
+          description: "Start date for collecting reports; should not be more than\
+            \ 60 days in the past. In YYYY-MM-DD format."
+ name: "Start date"
+ examples:
+ - "2022-10-10"
+ - "2022-10-22"
+ type: "string"
+ region:
+ description: "Region to pull data from (EU/NA/FE/SANDBOX)"
+ default: "NA"
+ name: "Region"
+ title: "AmazonAdsRegion"
+ enum:
+ - "NA"
+ - "EU"
+ - "FE"
+ - "SANDBOX"
+ type: "string"
+ profiles:
+ title: "Profiles"
+          description: "Profile IDs you want to fetch data for."
+ name: "Profile Ids"
+ type: "array"
+ items:
+ type: "integer"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-amazon-seller-partner:0.2.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/amazon-seller-partner"
+ changelogUrl: "https://docs.airbyte.io/integrations/sources/amazon-seller-partner"
+ connectionSpecification:
+ title: "Amazon Seller Partner Spec"
+ type: "object"
+ properties:
+ replication_start_date:
+ title: "Replication Start Date"
+ description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2017-01-25T00:00:00Z"
+ type: "string"
+ refresh_token:
+ title: "Refresh Token"
+          description: "The refresh token obtained via authorization (can be passed\
+            \ to the client instead)."
+ airbyte_secret: true
+ type: "string"
+ lwa_app_id:
+ title: "Lwa App Id"
+          description: "Your Login with Amazon app ID."
+ airbyte_secret: true
+ type: "string"
+ lwa_client_secret:
+ title: "Lwa Client Secret"
+          description: "Your Login with Amazon client secret."
+ airbyte_secret: true
+ type: "string"
+ aws_access_key:
+ title: "Aws Access Key"
+ description: "AWS user access key"
+ airbyte_secret: true
+ type: "string"
+ aws_secret_key:
+ title: "Aws Secret Key"
+ description: "AWS user secret key"
+ airbyte_secret: true
+ type: "string"
+ role_arn:
+ title: "Role Arn"
+          description: "The role's ARN (needs permission to 'Assume Role' in STS)."
+ airbyte_secret: true
+ type: "string"
+ aws_environment:
+ title: "AWSEnvironment"
+ description: "An enumeration."
+ enum:
+ - "PRODUCTION"
+ - "SANDBOX"
+ type: "string"
+ region:
+ title: "AWSRegion"
+ description: "An enumeration."
+ enum:
+ - "AE"
+ - "DE"
+ - "PL"
+ - "EG"
+ - "ES"
+ - "FR"
+ - "IN"
+ - "IT"
+ - "NL"
+ - "SA"
+ - "SE"
+ - "TR"
+ - "UK"
+ - "AU"
+ - "JP"
+ - "SG"
+ - "US"
+ - "BR"
+ - "CA"
+ - "MX"
+ - "GB"
+ type: "string"
+ required:
+ - "replication_start_date"
+ - "refresh_token"
+ - "lwa_app_id"
+ - "lwa_client_secret"
+ - "aws_access_key"
+ - "aws_secret_key"
+ - "role_arn"
+ - "aws_environment"
+ - "region"
+ definitions:
+ AWSEnvironment:
+ title: "AWSEnvironment"
+ description: "An enumeration."
+ enum:
+ - "PRODUCTION"
+ - "SANDBOX"
+ type: "string"
+ AWSRegion:
+ title: "AWSRegion"
+ description: "An enumeration."
+ enum:
+ - "AE"
+ - "DE"
+ - "PL"
+ - "EG"
+ - "ES"
+ - "FR"
+ - "IN"
+ - "IT"
+ - "NL"
+ - "SA"
+ - "SE"
+ - "TR"
+ - "UK"
+ - "AU"
+ - "JP"
+ - "SG"
+ - "US"
+ - "BR"
+ - "CA"
+ - "MX"
+ - "GB"
+ type: "string"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-amplitude:0.1.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/amplitude"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Amplitude Spec"
+ type: "object"
+ required:
+ - "api_key"
+ - "secret_key"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ api_key:
+ type: "string"
+ description: "This is the project’s API key, used for calling Amplitude’\
+ s APIs"
+ airbyte_secret: true
+ secret_key:
+ type: "string"
+ description: "This is the project's secret key, which is also used for calling\
+ \ Amplitude’s APIs"
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ description: "UTC date and time in the format 2021-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ examples:
+ - "2021-01-25T00:00:00Z"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-apify-dataset:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/apify-dataset"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Apify Dataset Spec"
+ type: "object"
+ required:
+ - "datasetId"
+ additionalProperties: false
+ properties:
+ datasetId:
+ type: "string"
+ description: "ID of the dataset you would like to load to Airbyte."
+ clean:
+ type: "boolean"
+ description: "If set to true, only clean items will be downloaded from the\
+ \ dataset. See description of what clean means in Apify API docs. If not sure, set clean to false."
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-appstore-singer:0.2.4"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/appstore"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Source Appstore Singer Spec"
+ type: "object"
+ required:
+ - "key_id"
+ - "private_key"
+ - "issuer_id"
+ - "vendor"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ key_id:
+ type: "string"
+ description: "Key_id is the API key you use to connect to appstore's API."
+ private_key:
+ type: "string"
+ description: "Private_key is the contents of the key file you use to connect to appstore's API."
+ airbyte_secret: true
+ multiline: true
+ issuer_id:
+ type: "string"
+ description: "Issuer_id is used to generate the credentials to connect to appstore's\
+ \ API."
+ vendor:
+ type: "string"
+ description: "This is the Apple ID of your account."
+ start_date:
+ type: "string"
+ description: "Date from which to start pulling data."
+ examples:
+ - "2020-11-16T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-asana:0.1.3"
+ spec:
+ documentationUrl: "https://docsurl.com"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Asana Spec"
+ type: "object"
+ additionalProperties: true
+ properties:
+ credentials:
+ title: "Authentication mechanism"
+          description: "Choose how to authenticate to Asana."
+ type: "object"
+ oneOf:
+ - type: "object"
+ title: "Authenticate with Personal Access Token"
+ required:
+ - "personal_access_token"
+ properties:
+ option_title:
+ type: "string"
+ title: "Credentials title"
+ description: "PAT Credentials"
+ const: "PAT Credentials"
+ personal_access_token:
+ type: "string"
+ title: "Personal Access Token"
+ description: "Asana Personal Access Token (generate yours here)."
+ airbyte_secret: true
+ - type: "object"
+ title: "Authenticate via Asana (Oauth)"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ properties:
+ option_title:
+ type: "string"
+ title: "Credentials title"
+ description: "OAuth Credentials"
+ const: "OAuth Credentials"
+ client_id:
+ type: "string"
+ title: ""
+ description: ""
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ title: ""
+ description: ""
+ airbyte_secret: true
+ refresh_token:
+ type: "string"
+ title: ""
+ description: ""
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject:
+ - "credentials"
+ - "1"
+ oauthFlowInitParameters:
+ - - "client_id"
+ - - "client_secret"
+ oauthFlowOutputParameters:
+ - - "refresh_token"
+- dockerImage: "airbyte/source-bamboo-hr:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/bamboo-hr"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Bamboo HR Spec"
+ type: "object"
+ required:
+ - "subdomain"
+ - "api_key"
+ additionalProperties: false
+ properties:
+ subdomain:
+ type: "string"
+          description: "Subdomain of BambooHR"
+ api_key:
+ type: "string"
+          description: "API key of BambooHR"
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-bigcommerce:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/bigcommerce"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "BigCommerce Source CDK Specifications"
+ type: "object"
+ required:
+ - "start_date"
+ - "store_hash"
+ - "access_token"
+ additionalProperties: false
+ properties:
+ start_date:
+ type: "string"
+          description: "The date from which you would like to replicate data. Format:\
+            \ YYYY-MM-DD."
+ examples:
+ - "2021-01-01"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ store_hash:
+ type: "string"
+ description: "The hash code of the store. For https://api.bigcommerce.com/stores/HASH_CODE/v3/,\
+ \ The store's hash code is 'HASH_CODE'."
+ access_token:
+ type: "string"
+ description: "The API Access Token."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-bigquery:0.1.4"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/source/bigquery"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "BigQuery Source Spec"
+ type: "object"
+ required:
+ - "project_id"
+ - "credentials_json"
+ additionalProperties: false
+ properties:
+ project_id:
+ type: "string"
+ description: "The GCP project ID for the project containing the target BigQuery\
+ \ dataset."
+ title: "Project ID"
+ dataset_id:
+ type: "string"
+ description: "The BigQuery Dataset ID to look for tables to replicate from."
+ title: "Default Dataset ID"
+ credentials_json:
+ type: "string"
+ description: "The contents of the JSON service account key. Check out the\
+ \ docs\
+ \ if you need help generating this key."
+ title: "Credentials JSON"
+ airbyte_secret: true
+ supportsIncremental: true
+ supportsNormalization: true
+ supportsDBT: true
+ supported_destination_sync_modes: []
+ supported_sync_modes:
+ - "overwrite"
+ - "append"
+ - "append_dedup"
+- dockerImage: "airbyte/source-bing-ads:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/bing-ads"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Bing Ads Spec"
+ type: "object"
+ required:
+ - "accounts"
+ - "client_id"
+ - "client_secret"
+ - "customer_id"
+ - "developer_token"
+ - "refresh_token"
+ - "user_id"
+ - "reports_start_date"
+ - "hourly_reports"
+ - "daily_reports"
+ - "weekly_reports"
+ - "monthly_reports"
+ additionalProperties: false
+ properties:
+ accounts:
+ title: "Accounts"
+ type: "object"
+ description: "Account selection strategy."
+ oneOf:
+ - title: "All accounts assigned to your user"
+ additionalProperties: false
+ description: "Fetch data for all available accounts."
+ required:
+ - "selection_strategy"
+ properties:
+ selection_strategy:
+ type: "string"
+ enum:
+ - "all"
+ const: "all"
+ - title: "Subset of your accounts"
+ additionalProperties: false
+ description: "Fetch data for subset of account ids."
+ required:
+ - "ids"
+ - "selection_strategy"
+ properties:
+ selection_strategy:
+ type: "string"
+ enum:
+ - "subset"
+ const: "subset"
+ ids:
+ type: "array"
+ description: "List of accounts from which data will be fetched."
+ items:
+ type: "string"
+ minItems: 1
+ uniqueItems: true
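+        # Illustrative value for this field (hypothetical account IDs; not part
+        # of the generated spec): syncing only specific accounts would look like
+        #   accounts:
+        #     selection_strategy: "subset"
+        #     ids: ["12345", "67890"]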
+ client_id:
+ type: "string"
+ description: "ID of your Microsoft Advertising client application."
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ description: "Secret of your Microsoft Advertising client application."
+ airbyte_secret: true
+ customer_id:
+ type: "string"
+ description: "User's customer ID."
+ developer_token:
+ type: "string"
+ description: "Developer token associated with user."
+ airbyte_secret: true
+ refresh_token:
+ type: "string"
+ description: "The long-lived Refresh token received via grant_type=refresh_token\
+ \ request."
+ airbyte_secret: true
+ user_id:
+ type: "string"
+ description: "Unique user identifier."
+ reports_start_date:
+ type: "string"
+ format: "date"
+ default: "2020-01-01"
+          description: "The date from which to perform the initial sync for report-related\
+            \ streams. In YYYY-MM-DD format."
+ hourly_reports:
+ title: "Hourly reports"
+ type: "boolean"
+ description: "The report data will be aggregated by each hour of the day."
+ default: false
+ daily_reports:
+ title: "Daily reports"
+ type: "boolean"
+ description: "The report data will be aggregated by each day."
+ default: false
+ weekly_reports:
+ title: "Weekly reports"
+ type: "boolean"
+ description: "The report data will be aggregated by each week running from\
+ \ Sunday through Saturday."
+ default: false
+ monthly_reports:
+ title: "Monthly reports"
+ type: "boolean"
+ description: "The report data will be aggregated by each month."
+ default: false
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-braintree:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/braintree"
+ connectionSpecification:
+ title: "Braintree Spec"
+ type: "object"
+ properties:
+ merchant_id:
+ title: "Merchant Id"
+          description: "Merchant ID is the unique identifier for the entire gateway\
+            \ account."
+ name: "Merchant ID"
+ type: "string"
+ public_key:
+ title: "Public Key"
+ description: "This is your user-specific public identifier for Braintree."
+ name: "Public key"
+ type: "string"
+ private_key:
+ title: "Private Key"
+ description: "This is your user-specific private identifier."
+ name: "Private Key"
+ airbyte_secret: true
+ type: "string"
+ start_date:
+ title: "Start Date"
+          description: "The date from which you'd like to replicate data for the\
+            \ Braintree API, in UTC. All data generated after this date will be replicated."
+ name: "Start date"
+ examples:
+ - "2020"
+ - "2020-12-30"
+ - "2020-11-22 20:20:05"
+ type: "string"
+ format: "date-time"
+ environment:
+ description: "Environment specifies where the data will come from."
+ name: "Environment"
+ examples:
+ - "sandbox"
+ - "production"
+ - "qa"
+ - "development"
+ allOf:
+ - $ref: "#/definitions/Environment"
+ required:
+ - "merchant_id"
+ - "public_key"
+ - "private_key"
+ - "environment"
+ definitions:
+ Environment:
+ title: "Environment"
+ description: "An enumeration."
+ enum:
+ - "Development"
+ - "Sandbox"
+ - "Qa"
+ - "Production"
+ type: "string"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-cart:0.1.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/cart"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Cart Spec"
+ type: "object"
+ required:
+ - "access_token"
+ - "start_date"
+ - "store_name"
+ additionalProperties: true
+ properties:
+ access_token:
+ type: "string"
+ airbyte_secret: true
+ description: "API Key. See the docs for information on how to generate this key."
+ store_name:
+ type: "string"
+ description: "Store name. All API URLs start with https://[mystorename.com]/api/v1/,\
+ \ where [mystorename.com] is the domain name of your store."
+ start_date:
+ title: "Start Date"
+ type: "string"
+ description: "The date from which you'd like to replicate the data"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2021-01-01T00:00:00Z"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-chargebee:0.1.4"
+ spec:
+ documentationUrl: "https://apidocs.chargebee.com/docs/api"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Chargebee Spec"
+ type: "object"
+ required:
+ - "site"
+ - "site_api_key"
+ - "start_date"
+ - "product_catalog"
+ additionalProperties: false
+ properties:
+ site:
+ type: "string"
+ title: "Site"
+ description: "The site prefix for your Chargebee instance."
+ examples:
+ - "airbyte-test"
+ site_api_key:
+ type: "string"
+ title: "API Key"
+ description: "The API key from your Chargebee instance."
+ examples:
+ - "test_3yzfanAXF66USdWC9wQcM555DQJkSYoppu"
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ title: "Start Date"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ description: "UTC date and time in the format 2021-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ examples:
+ - "2021-01-25T00:00:00Z"
+ product_catalog:
+ title: "Product Catalog"
+ type: "string"
+          description: "Product Catalog version of your Chargebee site. Instructions\
+            \ on how to find your version are available here, under the `API Version` section."
+ enum:
+ - "1.0"
+ - "2.0"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-clickhouse:0.1.4"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/clickhouse"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "ClickHouse Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "database"
+ - "username"
+ additionalProperties: false
+ properties:
+ host:
+ description: "Host Endpoint of the Clickhouse Cluster"
+ type: "string"
+ port:
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 8123
+ examples:
+ - "8123"
+ database:
+ description: "Name of the database."
+ type: "string"
+ examples:
+ - "default"
+ username:
+ description: "Username to use to access the database."
+ type: "string"
+ password:
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ ssl:
+ title: "SSL Connection"
+ description: "Encrypt data using SSL."
+ type: "boolean"
+ default: true
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
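+        # Illustrative value for this field (hypothetical host and user; not part
+        # of the generated spec): key-based tunneling would look like
+        #   tunnel_method:
+        #     tunnel_method: "SSH_KEY_AUTH"
+        #     tunnel_host: "bastion.example.com"
+        #     tunnel_port: 22
+        #     tunnel_user: "airbyte"
+        #     ssh_key: "<RSA PEM private key>"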
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-close-com:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/close-com"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Close.com Spec"
+ type: "object"
+ required:
+ - "api_key"
+ additionalProperties: false
+ properties:
+ api_key:
+ type: "string"
+ description: "Close.com API key (usually starts with 'api_'; find yours\
+ \ here)."
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ description: "The start date to sync data. Leave blank for full sync. Format:\
+ \ YYYY-MM-DD."
+ examples:
+ - "2021-01-01"
+ default: "2021-01-01"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-cockroachdb:0.1.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/postgres"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Cockroach Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "database"
+ - "username"
+ additionalProperties: false
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ order: 0
+ port:
+ title: "Port"
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 5432
+ examples:
+ - "5432"
+ order: 1
+ database:
+ title: "DB Name"
+ description: "Name of the database."
+ type: "string"
+ order: 2
+ username:
+ title: "User"
+ description: "Username to use to access the database."
+ type: "string"
+ order: 3
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ ssl:
+ title: "Connect using SSL"
+ description: "Encrypt client/server communications for increased security."
+ type: "boolean"
+ default: false
+ order: 5
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-delighted:0.1.0"
+ spec:
+ documentationUrl: "https://docsurl.com"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Delighted Spec"
+ type: "object"
+ required:
+ - "since"
+ - "api_key"
+ additionalProperties: false
+ properties:
+ since:
+ type: "integer"
+          description: "A Unix timestamp to retrieve records created on or after\
+            \ this time."
+ examples:
+ - 1625328167
+ api_key:
+ type: "string"
+ description: "A Delighted API key."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-dixa:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/dixa"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Dixa Spec"
+ type: "object"
+ required:
+ - "api_token"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ api_token:
+ type: "string"
+ description: "Dixa API token"
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ description: "The connector pulls records updated from this date onwards."
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ examples:
+ - "YYYY-MM-DD"
+ batch_size:
+ type: "integer"
+ description: "Number of days to batch into one request. Max 31."
+ pattern: "^[0-9]{1,2}$"
+ examples:
+ - 1
+ - 31
+ default: 31
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-drift:0.2.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/drift"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Drift Spec"
+ type: "object"
+ required:
+ - "access_token"
+ additionalProperties: false
+ properties:
+ access_token:
+ type: "string"
+ description: "Drift Access Token. See the docs for more information on how to generate this key."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-exchange-rates:0.2.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/exchangeratesapi"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "ratesapi.io Source Spec"
+ type: "object"
+ required:
+ - "start_date"
+ - "access_key"
+ additionalProperties: false
+ properties:
+ start_date:
+ type: "string"
+ description: "Start getting data from that date."
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ examples:
+ - "YYYY-MM-DD"
+ access_key:
+ type: "string"
+ description: "Your API Access Key. See here. The key is case sensitive."
+ airbyte_secret: true
+ base:
+ type: "string"
+          description: "ISO reference currency. See here. The free plan doesn't support\
+            \ Source Currency Switching; the default base currency is EUR."
+ examples:
+ - "EUR"
+ - "USD"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-facebook-marketing:0.2.22"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/facebook-marketing"
+ changelogUrl: "https://docs.airbyte.io/integrations/sources/facebook-marketing"
+ connectionSpecification:
+ title: "Source Facebook Marketing"
+ type: "object"
+ properties:
+ account_id:
+ title: "Account Id"
+ description: "The Facebook Ad account ID to use when pulling data from the\
+ \ Facebook Marketing API."
+ type: "string"
+ access_token:
+ title: "Access Token"
+ description: "The value of the access token generated. See the docs\
+ \ for more information"
+ airbyte_secret: true
+ type: "string"
+ start_date:
+ title: "Start Date"
+ description: "The date from which you'd like to replicate data for AdCreatives\
+ \ and AdInsights APIs, in the format YYYY-MM-DDT00:00:00Z. All data generated\
+ \ after this date will be replicated."
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2017-01-25T00:00:00Z"
+ type: "string"
+ format: "date-time"
+ end_date:
+ title: "End Date"
+ description: "The date until which you'd like to replicate data for AdCreatives\
+ \ and AdInsights APIs, in the format YYYY-MM-DDT00:00:00Z. All data generated\
+ \ between start_date and this date will be replicated. Not setting this\
+ \ option will result in always syncing the latest data."
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2017-01-26T00:00:00Z"
+ type: "string"
+ format: "date-time"
+ include_deleted:
+ title: "Include Deleted"
+ description: "Include data from deleted campaigns, ads, and adsets."
+ default: false
+ type: "boolean"
+ insights_lookback_window:
+ title: "Insights Lookback Window"
+ description: "The attribution window for the actions"
+ default: 28
+ minimum: 0
+ maximum: 28
+ type: "integer"
+ insights_days_per_job:
+ title: "Insights Days Per Job"
+          description: "Number of days to sync in one job. The more data you have,\
+            \ the smaller you want this parameter to be."
+ default: 7
+ minimum: 1
+ maximum: 30
+ type: "integer"
+ custom_insights:
+ title: "Custom Insights"
+          description: "A list which contains insights entries. Each entry must have\
+            \ a name and can contain fields, breakdowns or action_breakdowns."
+ type: "array"
+ items:
+ title: "InsightConfig"
+ type: "object"
+ properties:
+ name:
+ title: "Name"
+ description: "The name value of insight"
+ type: "string"
+ fields:
+ title: "Fields"
+ description: "A list of chosen fields for fields parameter"
+ default: []
+ type: "array"
+ items:
+ type: "string"
+ breakdowns:
+ title: "Breakdowns"
+ description: "A list of chosen breakdowns for breakdowns"
+ default: []
+ type: "array"
+ items:
+ type: "string"
+ action_breakdowns:
+ title: "Action Breakdowns"
+ description: "A list of chosen action_breakdowns for action_breakdowns"
+ default: []
+ type: "array"
+ items:
+ type: "string"
+ required:
+ - "name"
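+        # Illustrative entry for this list (hypothetical field and breakdown names;
+        # not part of the generated spec):
+        #   custom_insights:
+        #     - name: "my_custom_insight"
+        #       fields: ["impressions", "spend"]
+        #       breakdowns: ["age"]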
+ required:
+ - "account_id"
+ - "access_token"
+ - "start_date"
+ definitions:
+ InsightConfig:
+ title: "InsightConfig"
+ type: "object"
+ properties:
+ name:
+ title: "Name"
+ description: "The name value of insight"
+ type: "string"
+ fields:
+ title: "Fields"
+ description: "A list of chosen fields for fields parameter"
+ default: []
+ type: "array"
+ items:
+ type: "string"
+ breakdowns:
+ title: "Breakdowns"
+ description: "A list of chosen breakdowns for breakdowns"
+ default: []
+ type: "array"
+ items:
+ type: "string"
+ action_breakdowns:
+ title: "Action Breakdowns"
+ description: "A list of chosen action_breakdowns for action_breakdowns"
+ default: []
+ type: "array"
+ items:
+ type: "string"
+ required:
+ - "name"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "append"
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject: []
+ oauthFlowInitParameters: []
+ oauthFlowOutputParameters:
+ - - "access_token"
+- dockerImage: "airbyte/source-facebook-pages:0.1.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/facebook-pages"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Facebook Pages Spec"
+ type: "object"
+ required:
+ - "access_token"
+ - "page_id"
+ additionalProperties: true
+ properties:
+ access_token:
+ type: "string"
+ title: "Page Access Token"
+ description: "Facebook Page Access Token"
+ airbyte_secret: true
+ page_id:
+ type: "string"
+ title: "Page ID"
+ description: "Page ID"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject: []
+ oauthFlowInitParameters: []
+ oauthFlowOutputParameters:
+ - - "access_token"
+- dockerImage: "airbyte/source-file:0.2.6"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/file"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "File Source Spec"
+ type: "object"
+ additionalProperties: false
+ required:
+ - "dataset_name"
+ - "format"
+ - "url"
+ - "provider"
+ properties:
+ dataset_name:
+ type: "string"
+          description: "Name of the final table where this file will be replicated\
+            \ (should include only letters, numbers, dashes and underscores)"
+ format:
+ type: "string"
+ enum:
+ - "csv"
+ - "json"
+ - "jsonl"
+ - "excel"
+ - "feather"
+ - "parquet"
+ default: "csv"
+          description: "File Format of the file to be replicated (Warning: some formats\
+            \ may be experimental, please refer to docs)."
+ reader_options:
+ type: "string"
+ description: "This should be a valid JSON string used by each reader/parser\
+ \ to provide additional options and tune its behavior"
+ examples:
+ - "{}"
+ - "{'sep': ' '}"
+ url:
+ type: "string"
+ description: "URL path to access the file to be replicated"
+ provider:
+ type: "object"
+ description: "Storage Provider or Location of the file(s) to be replicated."
+ default: "Public Web"
+ oneOf:
+ - title: "HTTPS: Public Web"
+ required:
+ - "storage"
+ properties:
+ storage:
+ type: "string"
+ enum:
+ - "HTTPS"
+ default: "HTTPS"
+ - title: "GCS: Google Cloud Storage"
+ required:
+ - "storage"
+ properties:
+ storage:
+ type: "string"
+ enum:
+ - "GCS"
+ default: "GCS"
+ service_account_json:
+ type: "string"
+                description: "In order to access private Buckets stored on Google\
+                  \ Cloud, this connector would need service account JSON credentials\
+                  \ with the proper permissions as described here. Please generate\
+                  \ the credentials.json file and copy/paste its content into this\
+                  \ field (expecting JSON format). If accessing publicly available\
+                  \ data, this field is not necessary."
+ - title: "S3: Amazon Web Services"
+ required:
+ - "storage"
+ properties:
+ storage:
+ type: "string"
+ enum:
+ - "S3"
+ default: "S3"
+ aws_access_key_id:
+ type: "string"
+ description: "In order to access private Buckets stored on AWS S3,\
+ \ this connector would need credentials with the proper permissions.\
+ \ If accessing publicly available data, this field is not necessary."
+ aws_secret_access_key:
+ type: "string"
+ description: "In order to access private Buckets stored on AWS S3,\
+ \ this connector would need credentials with the proper permissions.\
+ \ If accessing publicly available data, this field is not necessary."
+ airbyte_secret: true
+ - title: "AzBlob: Azure Blob Storage"
+ required:
+ - "storage"
+ - "storage_account"
+ properties:
+ storage:
+ type: "string"
+ enum:
+ - "AzBlob"
+ default: "AzBlob"
+ storage_account:
+ type: "string"
+ description: "The globally unique name of the storage account that\
+ \ the desired blob sits within. See here for more details."
+ sas_token:
+ type: "string"
+ description: "To access Azure Blob Storage, this connector would need\
+ \ credentials with the proper permissions. One option is a SAS (Shared\
+ \ Access Signature) token. If accessing publicly available data,\
+ \ this field is not necessary."
+ airbyte_secret: true
+ shared_key:
+ type: "string"
+ description: "To access Azure Blob Storage, this connector would need\
+ \ credentials with the proper permissions. One option is a storage\
+ \ account shared key (aka account key or access key). If accessing\
+ \ publicly available data, this field is not necessary."
+ airbyte_secret: true
+ - title: "SSH: Secure Shell"
+ required:
+ - "storage"
+ - "user"
+ - "host"
+ properties:
+ storage:
+ type: "string"
+ enum:
+ - "SSH"
+ default: "SSH"
+ user:
+ type: "string"
+ password:
+ type: "string"
+ airbyte_secret: true
+ host:
+ type: "string"
+ port:
+ type: "string"
+ default: "22"
+ - title: "SCP: Secure copy protocol"
+ required:
+ - "storage"
+ - "user"
+ - "host"
+ properties:
+ storage:
+ type: "string"
+ enum:
+ - "SCP"
+ default: "SCP"
+ user:
+ type: "string"
+ password:
+ type: "string"
+ airbyte_secret: true
+ host:
+ type: "string"
+ port:
+ type: "string"
+ default: "22"
+ - title: "SFTP: Secure File Transfer Protocol"
+ required:
+ - "storage"
+ - "user"
+ - "host"
+ properties:
+ storage:
+ type: "string"
+ enum:
+ - "SFTP"
+ default: "SFTP"
+ user:
+ type: "string"
+ password:
+ type: "string"
+ airbyte_secret: true
+ host:
+ type: "string"
+ port:
+ type: "string"
+ default: "22"
+ - title: "Local Filesystem (limited)"
+ required:
+ - "storage"
+ properties:
+ storage:
+ type: "string"
+ description: "WARNING: Note that local storage URL available for read\
+ \ must start with the local mount \"/local/\" at the moment until\
+ \ we implement more advanced docker mounting options..."
+ enum:
+ - "local"
+ default: "local"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
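+# An illustrative (hypothetical) config satisfying the File source spec above;
+# values are placeholders, and reader_options shows the JSON options string
+# forwarded to the underlying reader:
+#   dataset_name: "my_dataset"
+#   format: "csv"
+#   reader_options: "{\"sep\": \";\", \"nrows\": 100}"
+#   url: "https://example.com/data/latest.csv"
+#   provider:
+#     storage: "HTTPS"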
+- dockerImage: "airbyte/source-freshdesk:0.2.7"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/freshdesk"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Freshdesk Spec"
+ type: "object"
+ required:
+ - "domain"
+ - "api_key"
+ additionalProperties: false
+ properties:
+ domain:
+ type: "string"
+ description: "Freshdesk domain"
+ examples:
+ - "myaccount.freshdesk.com"
+ pattern: "^[a-zA-Z0-9._-]*\\.freshdesk\\.com$"
+ api_key:
+ type: "string"
+ description: "Freshdesk API Key. See the docs for more information on how to obtain this key."
+ airbyte_secret: true
+ requests_per_minute:
+ title: "Requests per minute"
+ type: "integer"
+ description: "Number of requests per minute that this source allowed to\
+ \ use."
+ start_date:
+ title: "Start date"
+ description: "Date from which to start pulling data."
+ format: "date-time"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2020-12-01T00:00:00Z"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-freshsales:0.1.0"
+ spec:
+ documentationUrl: "https://docsurl.com"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Freshsales Spec"
+ type: "object"
+ required:
+ - "domain_name"
+ - "api_key"
+ additionalProperties: false
+ properties:
+ domain_name:
+ type: "string"
+ description: "Freshsales domain"
+ examples:
+ - "mydomain.myfreshworks.com"
+ api_key:
+ type: "string"
+ description: "Your API Access Key. See here. The key is case sensitive."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-freshservice:0.1.0"
+ spec:
+ documentationUrl: "https://hub.docker.com/r/airbyte/source-freshservice"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Freshservice Spec"
+ type: "object"
+ required:
+ - "domain_name"
+ - "api_key"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ domain_name:
+ type: "string"
+ description: "Freshservice domain"
+ examples:
+ - "mydomain.freshservice.com"
+ api_key:
+ title: "Api Key"
+ type: "string"
+ description: "Your API Access Key. See here. The key is case sensitive."
+ airbyte_secret: true
+ start_date:
+ title: "Replication Start Date"
+ type: "string"
+ description: "UTC date and time in the format 2020-10-01T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ examples:
+ - "2020-10-01T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-github:0.2.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/github"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Github Source Spec"
+ type: "object"
+ required:
+ - "start_date"
+ - "repository"
+ additionalProperties: true
+ properties:
+ credentials:
+ title: "Authentication mechanism"
+ description: "Choose how to authenticate to Github"
+ type: "object"
+ oneOf:
+ - type: "object"
+ title: "Authenticate via Github (Oauth)"
+ required:
+ - "access_token"
+ properties:
+ option_title:
+ type: "string"
+ title: "Credentials title"
+ description: "OAuth Credentials"
+ const: "OAuth Credentials"
+ access_token:
+ type: "string"
+ title: "Access Token"
+ description: "Oauth access token"
+ airbyte_secret: true
+ - type: "object"
+ title: "Authenticate with Personal Access Token"
+ required:
+ - "personal_access_token"
+ properties:
+ option_title:
+ type: "string"
+ title: "Credentials title"
+ description: "PAT Credentials"
+ const: "PAT Credentials"
+ personal_access_token:
+ type: "string"
+ title: "Personal Access Tokens"
+ description: "Log into Github and then generate a personal access token. To load balance your API quota consumption\
+ \ across multiple API tokens, input multiple tokens separated with\
+ \ \",\""
+ airbyte_secret: true
+ repository:
+ type: "string"
+ examples:
+ - "airbytehq/airbyte"
+ - "airbytehq/*"
+ title: "Github repositories"
+ description: "Space-delimited list of GitHub repositories/organizations,\
+ \ e.g. `airbytehq/airbyte` for single repository and `airbytehq/*` for\
+ \ get all repositories from organization"
+ start_date:
+ type: "string"
+ title: "Start date"
+ description: "The date from which you'd like to replicate data for GitHub\
+ \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\
+ \ will be replicated. Note that it will be used only in the following\
+ \ incremental streams: comments, commits and issues."
+ examples:
+ - "2021-03-01T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ branch:
+ type: "string"
+ title: "Branch"
+ examples:
+ - "airbytehq/airbyte/master"
+ description: "Space-delimited list of GitHub repository branches to pull\
+ \ commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified\
+ \ for a repository, the default branch will be pulled."
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject:
+ - "credentials"
+ - "0"
+ oauthFlowInitParameters: []
+ oauthFlowOutputParameters:
+ - - "access_token"
+- dockerImage: "airbyte/source-gitlab:0.1.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/gitlab"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Source Gitlab Singer Spec"
+ type: "object"
+ required:
+ - "api_url"
+ - "private_token"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ api_url:
+ type: "string"
+ examples:
+ - "gitlab.com"
+ description: "Please enter your basic URL from Gitlab instance"
+ private_token:
+ type: "string"
+ description: "Log into your Gitlab account and then generate a personal\
+ \ Access Token."
+ airbyte_secret: true
+ groups:
+ type: "string"
+ examples:
+ - "airbyte.io"
+ description: "Space-delimited list of groups. e.g. airbyte.io"
+ projects:
+ type: "string"
+ examples:
+ - "airbyte.io/documentation"
+ description: "Space-delimited list of projects. e.g. airbyte.io/documentation\
+ \ meltano/tap-gitlab"
+ start_date:
+ type: "string"
+ description: "The date from which you'd like to replicate data for Gitlab\
+ \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\
+ \ date will be replicated."
+ examples:
+ - "2021-03-01T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-google-ads:0.1.15"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/google-ads"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Google Ads Spec"
+ type: "object"
+ required:
+ - "credentials"
+ - "start_date"
+ - "customer_id"
+ additionalProperties: true
+ properties:
+ credentials:
+ type: "object"
+ title: "Google Credentials"
+ required:
+ - "developer_token"
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ properties:
+ developer_token:
+ type: "string"
+ title: "Developer Token"
+ description: "Developer token granted by Google to use their APIs. More\
+ \ instruction on how to find this value in our docs"
+ airbyte_secret: true
+ client_id:
+ type: "string"
+ title: "Client Id"
+ description: "Google client id. More instruction on how to find this\
+ \ value in our docs"
+ client_secret:
+ type: "string"
+ title: "Client Secret"
+ description: "Google client secret. More instruction on how to find\
+ \ this value in our docs"
+ airbyte_secret: true
+ access_token:
+ type: "string"
+ title: "Access Token"
+ description: "Access token generated using developer_token, oauth_client_id,\
+ \ and oauth_client_secret. More instruction on how to find this value\
+ \ in our docs"
+ airbyte_secret: true
+ refresh_token:
+ type: "string"
+ title: "Refresh Token"
+ description: "Refresh token generated using developer_token, oauth_client_id,\
+ \ and oauth_client_secret. More instruction on how to find this value\
+ \ in our docs"
+ airbyte_secret: true
+ customer_id:
+ title: "Customer Id"
+ type: "string"
+ description: "Customer id must be specified as a 10-digit number without\
+ \ dashes. More instruction on how to find this value in our docs"
+ login_customer_id:
+ type: "string"
+ title: "Login Customer ID"
+ description: "If your access to the customer account is through a manager\
+ \ account, this field is required and must be set to the customer ID of\
+ \ the manager account (10-digit number without dashes). More information\
+ \ about this field you can see here"
+ start_date:
+ type: "string"
+ title: "Start Date"
+ description: "UTC date and time in the format 2017-01-25. Any data before\
+ \ this date will not be replicated."
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ examples:
+ - "2017-01-25"
+ conversion_window_days:
+ title: "Conversion Window"
+ type: "integer"
+ description: "Define the historical replication lookback window in days"
+ minimum: 0
+ maximum: 1095
+ default: 14
+ examples:
+ - 14
+ custom_queries:
+ type: "array"
+ title: "Custom GAQL Queries"
+ items:
+ type: "object"
+ properties:
+ query:
+ type: "string"
+ title: "Custom query"
+ description: "A custom defined GAQL query for building the report.\
+ \ Should not contain segments.date expression as it used by incremental\
+ \ streams"
+ examples:
+ - "SELECT segments.ad_destination_type, campaign.advertising_channel_sub_type\
+ \ FROM campaign WHERE campaign.status = 'PAUSED'"
+ table_name:
+ type: "string"
+ title: "Destination table name"
+ description: "The table name in your destination database for choosen\
+ \ query."
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject:
+ - "credentials"
+ oauthFlowInitParameters:
+ - - "client_id"
+ - - "client_secret"
+ - - "developer_token"
+ oauthFlowOutputParameters:
+ - - "access_token"
+ - - "refresh_token"
+- dockerImage: "airbyte/source-google-analytics-v4:0.1.9"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/google-analytics-v4"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Google Analytics V4 Spec"
+ type: "object"
+ required:
+ - "view_id"
+ - "start_date"
+ additionalProperties: true
+ properties:
+ view_id:
+ type: "string"
+ title: "View ID"
+ description: "The ID for the Google Analytics View you want to fetch data\
+ \ from. This can be found from the Google Analytics Account Explorer."
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ title: "Start Date"
+ description: "A date in the format YYYY-MM-DD."
+ examples:
+ - "2020-06-01"
+ window_in_days:
+ type: "integer"
+ description: "The amount of days for each data-chunk begining from start_date.\
+ \ Bigger the value - faster the fetch. (Min=1, as for a Day; Max=364,\
+ \ as for a Year)."
+ examples:
+ - 30
+ - 60
+ - 90
+ - 120
+ - 200
+ - 364
+ default: 90
+ custom_reports:
+ title: "Custom Reports"
+ type: "string"
+ description: "A JSON array describing the custom reports you want to sync\
+ \ from GA. Check out the docs to get more information about this field."
+ credentials:
+ type: "object"
+ oneOf:
+ - title: "Authenticate via Google (Oauth)"
+ type: "object"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ properties:
+ auth_type:
+ type: "string"
+ const: "Client"
+ enum:
+ - "Client"
+ default: "Client"
+ order: 0
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Client ID of your developer application"
+ airbyte_secret: true
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The client secret of your developer application"
+ airbyte_secret: true
+ refresh_token:
+ title: "Refresh Token"
+ type: "string"
+ description: "A refresh token generated using the above client ID\
+ \ and secret"
+ airbyte_secret: true
+ access_token:
+ title: "Access Token"
+ type: "string"
+ description: "A access token generated using the above client ID,\
+ \ secret and refresh_token"
+ airbyte_secret: true
+ - type: "object"
+ title: "Service Account Key Authentication"
+ required:
+ - "credentials_json"
+ properties:
+ auth_type:
+ type: "string"
+ const: "Service"
+ enum:
+ - "Service"
+ default: "Service"
+ order: 0
+ credentials_json:
+ type: "string"
+ description: "The JSON key of the service account to use for authorization"
+ examples:
+ - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\
+ \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }"
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject:
+ - "credentials"
+ - "0"
+ oauthFlowInitParameters:
+ - - "client_id"
+ - - "client_secret"
+ oauthFlowOutputParameters:
+ - - "access_token"
+ - - "refresh_token"
+- dockerImage: "airbyte/source-google-directory:0.1.8"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/google-directory"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Google Directory Spec"
+ type: "object"
+ required: []
+ additionalProperties: true
+ properties:
+ credentials:
+ title: "Google Credentials"
+ description: "Google APIs use the OAuth 2.0 protocol for authentication\
+ \ and authorization. The Source supports Web server application and Service accounts scenarios"
+ type: "object"
+ oneOf:
+ - title: "Sign in via Google (Oauth)"
+ description: "For these scenario user only needs to give permission to\
+ \ read Google Directory data"
+ type: "object"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ properties:
+ credentials_title:
+ type: "string"
+ title: "Credentials title"
+ description: "Authentication scenario"
+ const: "Web server app"
+ enum:
+ - "Web server app"
+ default: "Web server app"
+ order: 0
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The client ID of developer application"
+ airbyte_secret: true
+ client_secret:
+ title: "Client secret"
+ type: "string"
+ description: "The client secret of developer application"
+ airbyte_secret: true
+ refresh_token:
+ title: "Refresh Token"
+ type: "string"
+ description: "The token for obtaining new access token"
+ airbyte_secret: true
+ - title: "Service account Key"
+ description: "For these scenario user should obtain service account's\
+ \ credentials from the Google API Console and provide delegated email"
+ type: "object"
+ required:
+ - "credentials_json"
+ - "email"
+ properties:
+ credentials_title:
+ type: "string"
+ title: "Credentials title"
+ description: "Authentication scenario"
+ const: "Service accounts"
+ enum:
+ - "Service accounts"
+ default: "Service accounts"
+ order: 0
+ credentials_json:
+ type: "string"
+ title: "Credentials JSON"
+ description: "The contents of the JSON service account key. See the\
+ \ docs for more information on how to generate this key."
+ airbyte_secret: true
+ email:
+ type: "string"
+ title: "Email"
+ description: "The email of the user, which has permissions to access\
+ \ the Google Workspace Admin APIs."
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject:
+ - "credentials"
+ - "0"
+ oauthFlowInitParameters:
+ - - "client_id"
+ - - "client_secret"
+ oauthFlowOutputParameters:
+ - - "refresh_token"
+- dockerImage: "airbyte/source-google-search-console:0.1.6"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/google-search-console"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Google Search Console Spec"
+ type: "object"
+ additionalProperties: false
+ required:
+ - "site_urls"
+ - "start_date"
+ - "authorization"
+ properties:
+ site_urls:
+ type: "array"
+ items:
+ type: "string"
+ description: "Website URLs property; do not include the domain-level property\
+ \ in the list"
+ examples:
+ - "https://example1.com"
+ - "https://example2.com"
+ start_date:
+ type: "string"
+ description: "The date from which you'd like to replicate data in the format\
+ \ YYYY-MM-DD."
+ examples:
+ - "2021-01-01"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ end_date:
+ type: "string"
+ description: "The date from which you'd like to replicate data in the format\
+ \ YYYY-MM-DD. Must be greater or equal start_date field"
+ examples:
+ - "2021-12-12"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ authorization:
+ type: "object"
+ title: "Authentication Type"
+ oneOf:
+ - title: "Authenticate via Google (Oauth)"
+ type: "object"
+ required:
+ - "auth_type"
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ properties:
+ auth_type:
+ type: "string"
+ const: "Client"
+ enum:
+ - "Client"
+ default: "Client"
+ order: 0
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Client ID of your developer application"
+ airbyte_secret: true
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The client secret of your developer application"
+ airbyte_secret: true
+ access_token:
+ title: "Access Token"
+ type: "string"
+ description: "An access token generated using the above client ID\
+ \ and secret"
+ airbyte_secret: true
+ refresh_token:
+ title: "Refresh Token"
+ type: "string"
+ description: "A refresh token generated using the above client ID\
+ \ and secret"
+ airbyte_secret: true
+ - type: "object"
+ title: "Service Account Key Authentication"
+ required:
+ - "auth_type"
+ - "service_account_info"
+ - "email"
+ properties:
+ auth_type:
+ type: "string"
+ const: "Service"
+ enum:
+ - "Service"
+ default: "Service"
+ order: 0
+ service_account_info:
+ title: "Service Account JSON Key"
+ type: "string"
+ description: "The JSON key of the service account to use for authorization"
+ examples:
+ - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\
+ \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }"
+ email:
+ title: "Admin Email"
+ type: "string"
+ description: "The email of the user which has permissions to access\
+ \ the Google Workspace Admin APIs."
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject:
+ - "authorization"
+ - "0"
+ oauthFlowInitParameters:
+ - - "client_id"
+ - - "client_secret"
+ oauthFlowOutputParameters:
+ - - "access_token"
+ - - "refresh_token"
+- dockerImage: "airbyte/source-google-sheets:0.2.6"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/google-sheets"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Stripe Source Spec"
+ type: "object"
+ required:
+ - "spreadsheet_id"
+ additionalProperties: true
+ properties:
+ spreadsheet_id:
+ type: "string"
+ description: "The ID of the spreadsheet to be replicated."
+ credentials:
+ type: "object"
+ oneOf:
+ - title: "Authenticate via Google (Oauth)"
+ type: "object"
+ required:
+ - "auth_type"
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ properties:
+ auth_type:
+ type: "string"
+ const: "Client"
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Client ID of your developer application"
+ airbyte_secret: true
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The client secret of your developer application"
+ airbyte_secret: true
+ refresh_token:
+ title: "Refresh Token"
+ type: "string"
+ description: "A refresh token generated using the above client ID\
+ \ and secret"
+ airbyte_secret: true
+ - title: "Service Account Key Authentication"
+ type: "object"
+ required:
+ - "auth_type"
+ - "service_account_info"
+ properties:
+ auth_type:
+ type: "string"
+ const: "Service"
+ service_account_info:
+ type: "string"
+ description: "The JSON key of the service account to use for authorization"
+ examples:
+ - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\
+ \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject:
+ - "credentials"
+ - "0"
+ oauthFlowInitParameters:
+ - - "client_id"
+ - - "client_secret"
+ oauthFlowOutputParameters:
+ - - "refresh_token"
+- dockerImage: "airbyte/source-google-workspace-admin-reports:0.1.5"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/google-workspace-admin-reports"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Google Directory Spec"
+ type: "object"
+ required:
+ - "credentials_json"
+ - "email"
+ additionalProperties: false
+ properties:
+ credentials_json:
+ type: "string"
+ description: "The contents of the JSON service account key. See the docs for more information on how to generate this key."
+ airbyte_secret: true
+ email:
+ type: "string"
+ description: "The email of the user, which has permissions to access the\
+ \ Google Workspace Admin APIs."
+ lookback:
+ type: "integer"
+ minimum: 0
+ maximum: 180
+ description: "Sets the range of time shown in the report. Reports API allows\
+ \ from up to 180 days ago. "
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-greenhouse:0.2.5"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/greenhouse"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Greenhouse Spec"
+ type: "object"
+ required:
+ - "api_key"
+ additionalProperties: false
+ properties:
+ api_key:
+ type: "string"
+ description: "Greenhouse API Key. See the docs for more information on how to generate this key."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-harvest:0.1.5"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/harvest"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Harvest Spec"
+ type: "object"
+ required:
+ - "api_token"
+ - "account_id"
+ - "replication_start_date"
+ additionalProperties: false
+ properties:
+ api_token:
+ title: "API Token"
+ description: "Harvest API Token."
+ airbyte_secret: true
+ type: "string"
+ account_id:
+ title: "Account ID"
+ description: "Harvest account ID. Required for all Harvest requests in pair\
+ \ with API Key"
+ airbyte_secret: true
+ type: "string"
+ replication_start_date:
+ title: "Replication Start Date"
+ description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2017-01-25T00:00:00Z"
+ type: "string"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "append"
+- dockerImage: "airbyte/source-hubspot:0.1.22"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/hubspot"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Hubspot Source Spec"
+ type: "object"
+ required:
+ - "start_date"
+ - "credentials"
+ additionalProperties: false
+ properties:
+ start_date:
+ type: "string"
+ title: "Replication start date"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ examples:
+ - "2017-01-25T00:00:00Z"
+ credentials:
+ title: "Authentication mechanism"
+ description: "Choose either to provide the API key or the OAuth2.0 credentials"
+ type: "object"
+ oneOf:
+ - type: "object"
+ title: "Authenticate via Hubspot (Oauth)"
+ required:
+ - "redirect_uri"
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ - "access_token"
+ - "credentials_title"
+ properties:
+ credentials_title:
+ type: "string"
+ title: "Credentials title"
+ description: "Name of the credentials set"
+ const: "OAuth Credentials"
+ enum:
+ - "OAuth Credentials"
+ default: "OAuth Credentials"
+ order: 0
+ client_id:
+ title: "Client ID"
+ description: "Hubspot client_id. See our docs if you need help finding this id."
+ type: "string"
+ examples:
+ - "123456789000"
+ client_secret:
+ title: "Client Secret"
+ description: "Hubspot client_secret. See our docs if you need help finding this secret."
+ type: "string"
+ examples:
+ - "secret"
+ airbyte_secret: true
+ refresh_token:
+ title: "Refresh token"
+ description: "Hubspot refresh_token. See our docs if you need help generating the token."
+ type: "string"
+ examples:
+ - "refresh_token"
+ airbyte_secret: true
+ - type: "object"
+ title: "API key"
+ required:
+ - "api_key"
+ - "credentials_title"
+ properties:
+ credentials_title:
+ type: "string"
+ title: "Credentials title"
+ description: "Name of the credentials set"
+ const: "API Key Credentials"
+ enum:
+ - "API Key Credentials"
+ default: "API Key Credentials"
+ order: 0
+ api_key:
+ title: "API key"
+ description: "Hubspot API Key. See our docs if you need help finding this key."
+ type: "string"
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject:
+ - "credentials"
+ - "0"
+ oauthFlowInitParameters:
+ - - "client_id"
+ - - "client_secret"
+ - - "refresh_token"
+ oauthFlowOutputParameters:
+ - - "refresh_token"
+- dockerImage: "airbyte/source-db2:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/db2"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "IBM Db2 Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "db"
+ - "username"
+ - "password"
+ additionalProperties: false
+ properties:
+ host:
+ description: "Host of the Db2."
+ type: "string"
+ port:
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 50000
+ examples:
+ - "50000"
+ db:
+ description: "Name of the database."
+ type: "string"
+ examples:
+ - "default"
+ username:
+ description: "Username to use to access the database."
+ type: "string"
+ password:
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-instagram:0.1.9"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/instagram"
+ changelogUrl: "https://docs.airbyte.io/integrations/sources/instagram"
+ connectionSpecification:
+ title: "Source Instagram"
+ type: "object"
+ properties:
+ start_date:
+ title: "Start Date"
+ description: "The date from which you'd like to replicate data for User\
+ \ Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after\
+ \ this date will be replicated."
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2017-01-25T00:00:00Z"
+ type: "string"
+ format: "date-time"
+ access_token:
+ title: "Access Token"
+ description: "The value of the access token generated. See the docs for\
+ \ more information"
+ airbyte_secret: true
+ type: "string"
+ required:
+ - "start_date"
+ - "access_token"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "append"
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject: []
+ oauthFlowInitParameters: []
+ oauthFlowOutputParameters:
+ - - "access_token"
+- dockerImage: "airbyte/source-intercom:0.1.6"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/intercom"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Source Intercom Spec"
+ type: "object"
+ required:
+ - "access_token"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ access_token:
+ type: "string"
+ description: "Intercom Access Token. See the docs for more information on how to obtain this key."
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ description: "The date from which you'd like to replicate data for Intercom\
+ \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\
+ \ date will be replicated."
+ examples:
+ - "2020-11-16T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-iterable:0.1.11"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/iterable"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Iterable Spec"
+ type: "object"
+ required:
+ - "start_date"
+ - "api_key"
+ additionalProperties: false
+ properties:
+ start_date:
+ type: "string"
+ description: "The date from which you'd like to replicate data for Iterable,\
+ \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\
+ \ will be replicated."
+ examples:
+ - "2021-04-01T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ api_key:
+ type: "string"
+ description: "Iterable API Key. See the docs for more information on how to obtain this key."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-jira:0.2.14"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/jira"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Jira Spec"
+ type: "object"
+ required:
+ - "api_token"
+ - "domain"
+ - "email"
+ additionalProperties: true
+ properties:
+ api_token:
+ type: "string"
+ description: "Jira API Token. See the docs for more information on how to generate this key."
+ airbyte_secret: true
+ domain:
+ type: "string"
+ examples:
+ - "domainname.atlassian.net"
+ pattern: "^[a-zA-Z0-9._-]*\\.atlassian\\.net$"
+ description: "Domain for your Jira account, e.g. airbyteio.atlassian.net"
+ email:
+ type: "string"
+ description: "The user email for your Jira account"
+ projects:
+ type: "array"
+ title: "Projects"
+ items:
+ type: "string"
+ examples:
+ - "PROJ1"
+ - "PROJ2"
+ description: "Comma-separated list of Jira project keys to replicate data\
+ \ for"
+ start_date:
+ type: "string"
+ title: "Start Date"
+ description: "The date from which you'd like to replicate data for Jira\
+ \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\
+ \ will be replicated. Note that it will be used only in the following\
+ \ incremental streams: issues."
+ examples:
+ - "2021-03-01T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ additional_fields:
+ type: "array"
+ title: "Additional Fields"
+ items:
+ type: "string"
+ description: "Comma-separated list of additional fields to include in replicating\
+ \ issues"
+ examples:
+ - "Field A"
+ - "Field B"
+ expand_issue_changelog:
+ type: "boolean"
+ title: "Expand Issue Changelog"
+ description: "Expand the changelog when replicating issues"
+ default: false
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-kafka:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/kafka"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Kafka Source Spec"
+ type: "object"
+ required:
+ - "bootstrap_servers"
+ - "subscription"
+ - "protocol"
+ additionalProperties: false
+ properties:
+ bootstrap_servers:
+ title: "Bootstrap servers"
+ description: "A list of host/port pairs to use for establishing the initial\
+ \ connection to the Kafka cluster. The client will make use of all servers\
+ \ irrespective of which servers are specified here for bootstrapping—this\
+ \ list only impacts the initial hosts used to discover the full set of\
+ \ servers. This list should be in the form host1:port1,host2:port2,....\
+ \ Since these servers are just used for the initial connection to discover\
+ \ the full cluster membership (which may change dynamically), this list\
+ \ need not contain the full set of servers (you may want more than one,\
+ \ though, in case a server is down)."
+ type: "string"
+ examples:
+ - "kafka-broker1:9092,kafka-broker2:9092"
+ subscription:
+ title: "Subscribe method"
+ type: "object"
+ description: "You can choose to manually assign a list of partitions, or\
+ \ subscribe to all topics matching specified pattern to get dynamically\
+ \ assigned partitions"
+ oneOf:
+ - title: "Manually assign a list of partitions"
+ required:
+ - "subscription_type"
+ - "topic_partitions"
+ properties:
+ subscription_type:
+ description: "Manually assign a list of partitions to this consumer.\
+ \ This interface does not allow for incremental assignment and will\
+ \ replace the previous assignment (if there is one).\nIf the given\
+ \ list of topic partitions is empty, it is treated the same as unsubscribe()."
+ type: "string"
+ const: "assign"
+ enum:
+ - "assign"
+ default: "assign"
+ topic_partitions:
+ title: "List of topic:partition pairs"
+ type: "string"
+ examples:
+ - "sample.topic:0, sample.topic:1"
+ - title: "Subscribe to all topics matching specified pattern"
+ required:
+ - "subscription_type"
+ - "topic_pattern"
+ properties:
+ subscription_type:
+ description: "Topic pattern from which the records will be read."
+ type: "string"
+ const: "subscribe"
+ enum:
+ - "subscribe"
+ default: "subscribe"
+ topic_pattern:
+ title: "Topic pattern"
+ type: "string"
+ examples:
+ - "sample.topic"
+ test_topic:
+ title: "Test topic"
+ description: "Topic to test if Airbyte can consume messages."
+ type: "string"
+ examples:
+ - "test.topic"
+ group_id:
+ title: "Group ID"
+ description: "Group id."
+ type: "string"
+ examples:
+ - "group.id"
+ max_poll_records:
+ title: "Max poll records"
+ description: "The maximum number of records returned in a single call to\
+ \ poll(). Note, that max_poll_records does not impact the underlying fetching\
+ \ behavior. The consumer will cache the records from each fetch request\
+ \ and returns them incrementally from each poll."
+ type: "integer"
+ default: 500
+ protocol:
+ title: "Protocol"
+ type: "object"
+ description: "Protocol used to communicate with brokers."
+ oneOf:
+ - title: "PLAINTEXT"
+ required:
+ - "security_protocol"
+ properties:
+ security_protocol:
+ type: "string"
+ enum:
+ - "PLAINTEXT"
+ default: "PLAINTEXT"
+ - title: "SASL PLAINTEXT"
+ required:
+ - "security_protocol"
+ - "sasl_mechanism"
+ - "sasl_jaas_config"
+ properties:
+ security_protocol:
+ type: "string"
+ enum:
+ - "SASL_PLAINTEXT"
+ default: "SASL_PLAINTEXT"
+ sasl_mechanism:
+ title: "SASL mechanism"
+ description: "SASL mechanism used for client connections. This may\
+ \ be any mechanism for which a security provider is available."
+ type: "string"
+ default: "PLAIN"
+ enum:
+ - "PLAIN"
+ sasl_jaas_config:
+ title: "SASL JAAS config"
+ description: "JAAS login context parameters for SASL connections in\
+ \ the format used by JAAS configuration files."
+ type: "string"
+ default: ""
+ airbyte_secret: true
+ - title: "SASL SSL"
+ required:
+ - "security_protocol"
+ - "sasl_mechanism"
+ - "sasl_jaas_config"
+ properties:
+ security_protocol:
+ type: "string"
+ enum:
+ - "SASL_SSL"
+ default: "SASL_SSL"
+ sasl_mechanism:
+ title: "SASL mechanism"
+ description: "SASL mechanism used for client connections. This may\
+ \ be any mechanism for which a security provider is available."
+ type: "string"
+ default: "GSSAPI"
+ enum:
+ - "GSSAPI"
+ - "OAUTHBEARER"
+ - "SCRAM-SHA-256"
+ sasl_jaas_config:
+ title: "SASL JAAS config"
+ description: "JAAS login context parameters for SASL connections in\
+ \ the format used by JAAS configuration files."
+ type: "string"
+ default: ""
+ airbyte_secret: true
+ client_id:
+ title: "Client ID"
+ description: "An id string to pass to the server when making requests. The\
+ \ purpose of this is to be able to track the source of requests beyond\
+ \ just ip/port by allowing a logical application name to be included in\
+ \ server-side request logging."
+ type: "string"
+ examples:
+ - "airbyte-consumer"
+ enable_auto_commit:
+ title: "Enable auto commit"
+ description: "If true the consumer's offset will be periodically committed\
+ \ in the background."
+ type: "boolean"
+ default: true
+ auto_commit_interval_ms:
+ title: "Auto commit interval ms"
+ description: "The frequency in milliseconds that the consumer offsets are\
+ \ auto-committed to Kafka if enable.auto.commit is set to true."
+ type: "integer"
+ default: 5000
+ client_dns_lookup:
+ title: "Client DNS lookup"
+ description: "Controls how the client uses DNS lookups. If set to use_all_dns_ips,\
+ \ connect to each returned IP address in sequence until a successful connection\
+ \ is established. After a disconnection, the next IP is used. Once all\
+ \ IPs have been used once, the client resolves the IP(s) from the hostname\
+ \ again. If set to resolve_canonical_bootstrap_servers_only, resolve each\
+ \ bootstrap address into a list of canonical names. After the bootstrap\
+ \ phase, this behaves the same as use_all_dns_ips. If set to default (deprecated),\
+ \ attempt to connect to the first IP address returned by the lookup, even\
+ \ if the lookup returns multiple IP addresses."
+ type: "string"
+ default: "use_all_dns_ips"
+ enum:
+ - "default"
+ - "use_all_dns_ips"
+ - "resolve_canonical_bootstrap_servers_only"
+ retry_backoff_ms:
+ title: "Retry backoff ms"
+ description: "The amount of time to wait before attempting to retry a failed\
+ \ request to a given topic partition. This avoids repeatedly sending requests\
+ \ in a tight loop under some failure scenarios."
+ type: "integer"
+ default: 100
+ request_timeout_ms:
+ title: "Request timeout ms"
+ description: "The configuration controls the maximum amount of time the\
+ \ client will wait for the response of a request. If the response is not\
+ \ received before the timeout elapses the client will resend the request\
+ \ if necessary or fail the request if retries are exhausted."
+ type: "integer"
+ default: 30000
+ receive_buffer_bytes:
+ title: "Receive buffer bytes"
+ description: "The size of the TCP receive buffer (SO_RCVBUF) to use when\
+ \ reading data. If the value is -1, the OS default will be used."
+ type: "integer"
+ default: 32768
+ auto_offset_reset:
+ title: "Auto offset reset"
+ description: "What to do when there is no initial offset in Kafka or if\
+ \ the current offset does not exist any more on the server - earliest:\
+ \ automatically reset the offset to the earliest offset, latest: automatically\
+ \ reset the offset to the latest offset, none: throw exception to the\
+ \ consumer if no previous offset is found for the consumer's group, anything\
+ \ else: throw exception to the consumer."
+ type: "string"
+ default: "latest"
+ enum:
+ - "latest"
+ - "earliest"
+ - "none"
+ repeated_calls:
+ title: "Repeated calls"
+ description: "The number of repeated calls to poll() if no messages were\
+ \ received."
+ type: "integer"
+ default: 3
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ supported_source_sync_modes:
+ - "append"
+- dockerImage: "airbyte/source-klaviyo:0.1.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/klaviyo"
+ changelogUrl: "https://docs.airbyte.io/integrations/sources/klaviyo"
+ connectionSpecification:
+ title: "Klaviyo Spec"
+ type: "object"
+ properties:
+ api_key:
+ title: "Api Key"
+ description: "Klaviyo API Key. See our docs if you need help finding this key."
+ airbyte_secret: true
+ type: "string"
+ start_date:
+ title: "Start Date"
+ description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2017-01-25T00:00:00Z"
+ type: "string"
+ required:
+ - "api_key"
+ - "start_date"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "append"
+- dockerImage: "airbyte/source-lever-hiring:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/lever-hiring"
+ changelogUrl: "https://docs.airbyte.io/integrations/sources/lever-hiring#changelog"
+ connectionSpecification:
+ title: "Lever Hiring Spec"
+ type: "object"
+ properties:
+ client_id:
+ title: "Client Id"
+ description: "The client application id as provided when registering the\
+ \ application with Lever."
+ type: "string"
+ client_secret:
+ title: "Client Secret"
+ description: "The application secret as provided when registering the application\
+ \ with Lever."
+ airbyte_secret: true
+ type: "string"
+ refresh_token:
+ title: "Refresh Token"
+ description: "The refresh token your application will need to submit to\
+ \ get a new access token after it's expired."
+ type: "string"
+ environment:
+ title: "Environment"
+ description: "Sandbox or Production environment."
+ default: "Production"
+ enum:
+ - "Sandbox"
+ - "Production"
+ type: "string"
+ start_date:
+ title: "Start Date"
+ description: "UTC date and time in the format 2019-02-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2021-04-25T00:00:00Z"
+ type: "string"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ - "start_date"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject: []
+ oauthFlowInitParameters:
+ - - "client_id"
+ - - "client_secret"
+ - - "refresh_token"
+ oauthFlowOutputParameters: []
+- dockerImage: "airbyte/source-linkedin-ads:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/linkedin-ads"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Linkedin Ads Spec"
+ type: "object"
+ required:
+ - "start_date"
+ - "access_token"
+ additionalProperties: false
+ properties:
+ start_date:
+ type: "string"
+ title: "Start Date"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ description: "Date in the format 2020-09-17. Any data before this date will\
+ \ not be replicated."
+ examples:
+ - "2021-05-17"
+ access_token:
+ type: "string"
+ title: "Access Token"
+ description: "The token value ganerated using Auth Code"
+ airbyte_secret: true
+ account_ids:
+ title: "Account IDs"
+ type: "array"
+ description: "Specify the Account IDs separated by space, from which to\
+ \ pull the data. Leave empty to pull from all associated accounts."
+ items:
+ type: "integer"
+ default: []
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-looker:0.2.5"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/looker"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Looker Spec"
+ type: "object"
+ required:
+ - "domain"
+ - "client_id"
+ - "client_secret"
+ additionalProperties: false
+ properties:
+ domain:
+ type: "string"
+ examples:
+ - "domainname.looker.com"
+ - "looker.clientname.com"
+ - "123.123.124.123:8000"
+ description: "Domain for your Looker account, e.g. airbyte.cloud.looker.com,looker.[clientname].com,IP\
+ \ address"
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Client ID is first part of an API3 key that is specific\
+ \ to each Looker user. See the docs for more information on how to generate this key."
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The Client Secret is second part of an API3 key."
+ run_look_ids:
+ title: "Look IDs to Run"
+ type: "array"
+ items:
+ type: "string"
+ pattern: "^[0-9]*$"
+ description: "The IDs of any Looks to run (optional)"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-mailchimp:0.2.8"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/mailchimp"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Mailchimp Spec"
+ type: "object"
+ required:
+ - "username"
+ - "apikey"
+ additionalProperties: false
+ properties:
+ username:
+ type: "string"
+ description: "The Username or email you use to sign into Mailchimp"
+ apikey:
+ type: "string"
+ airbyte_secret: true
+ description: "API Key. See the docs for information on how to generate this key."
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-marketo:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/marketo"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Source Marketo Spec"
+ type: "object"
+ required:
+ - "domain_url"
+ - "client_id"
+ - "client_secret"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ domain_url:
+ type: "string"
+ description: "Your Marketo Base URL. See the docs for info on how to obtain this."
+ examples:
+ - "https://000-AAA-000.mktorest.com"
+ airbyte_secret: true
+ client_id:
+ type: "string"
+ description: "Your Marketo client_id. See the docs for info on how to obtain this."
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ description: "Your Marketo client secret. See the docs for info on how to obtain this."
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ description: "Data generated in Marketo after this date will be replicated.\
+ \ This date must be specified in the format YYYY-MM-DDT00:00:00Z."
+ examples:
+ - "2020-09-25T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ window_in_days:
+ type: "integer"
+ description: "The amount of days for each data-chunk begining from start_date.\
+ \ (Min=1, as for a Day; Max=30, as for a Month)."
+ examples:
+ - 1
+ - 5
+ - 10
+ - 15
+ - 30
+ default: 30
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-mssql:0.3.6"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/mssql"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "MSSQL Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "database"
+ - "username"
+ additionalProperties: false
+ properties:
+ host:
+ description: "Hostname of the database."
+ type: "string"
+ port:
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ examples:
+ - "1433"
+ database:
+ description: "Name of the database."
+ type: "string"
+ examples:
+ - "master"
+ username:
+ description: "Username to use to access the database."
+ type: "string"
+ password:
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ ssl_method:
+ title: "SSL Method"
+ type: "object"
+ description: "Encryption method to use when communicating with the database"
+ order: 6
+ oneOf:
+ - title: "Unencrypted"
+ additionalProperties: false
+ description: "Data transfer will not be encrypted."
+ required:
+ - "ssl_method"
+ properties:
+ ssl_method:
+ type: "string"
+ const: "unencrypted"
+ enum:
+ - "unencrypted"
+ default: "unencrypted"
+ - title: "Encrypted (trust server certificate)"
+ additionalProperties: false
+ description: "Use the cert provided by the server without verification.\
+ \ (For testing purposes only!)"
+ required:
+ - "ssl_method"
+ properties:
+ ssl_method:
+ type: "string"
+ const: "encrypted_trust_server_certificate"
+ enum:
+ - "encrypted_trust_server_certificate"
+ default: "encrypted_trust_server_certificate"
+ - title: "Encrypted (verify certificate)"
+ additionalProperties: false
+ description: "Verify and use the cert provided by the server."
+ required:
+ - "ssl_method"
+ - "trustStoreName"
+ - "trustStorePassword"
+ properties:
+ ssl_method:
+ type: "string"
+ const: "encrypted_verify_certificate"
+ enum:
+ - "encrypted_verify_certificate"
+ default: "encrypted_verify_certificate"
+ hostNameInCertificate:
+ title: "Host Name In Certificate"
+ type: "string"
+ description: "Specifies the host name of the server. The value of\
+ \ this property must match the subject property of the certificate."
+ order: 7
+ replication_method:
+ type: "string"
+ title: "Replication Method"
+ description: "Replication method to use for extracting data from the database.\
+ \ STANDARD replication requires no setup on the DB side but will not be\
+ \ able to represent deletions incrementally. CDC uses {TBC} to detect\
+ \ inserts, updates, and deletes. This needs to be configured on the source\
+ \ database itself."
+ default: "STANDARD"
+ enum:
+ - "STANDARD"
+ - "CDC"
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials for logging\
+ \ into the jump server host."
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
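+# Sketch of the tunnel_method object for the MSSQL spec above, using password
+# authentication (host and credentials are placeholders):
+#   tunnel_method:
+#     tunnel_method: "SSH_PASSWORD_AUTH"
+#     tunnel_host: "jump.example.com"
+#     tunnel_port: 22
+#     tunnel_user: "airbyte"
+#     tunnel_user_password: "<password>"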
+- dockerImage: "airbyte/source-microsoft-teams:0.2.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/microsoft-teams"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Microsoft Teams Spec"
+ type: "object"
+ required:
+ - "tenant_id"
+ - "client_id"
+ - "client_secret"
+ - "period"
+ additionalProperties: false
+ properties:
+ tenant_id:
+ title: "Directory (tenant) ID"
+ type: "string"
+ description: "Directory (tenant) ID"
+ client_id:
+ title: "Application (client) ID"
+ type: "string"
+ description: "Application (client) ID"
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "Client secret"
+ airbyte_secret: true
+ period:
+ type: "string"
+ description: "Specifies the length of time over which the Team Device Report\
+ \ stream is aggregated. The supported values are: D7, D30, D90, and D180."
+ examples:
+ - "D7"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-mixpanel:0.1.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/mixpanel"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Source Mixpanel Spec"
+ type: "object"
+ required:
+ - "api_secret"
+ additionalProperties: true
+ properties:
+ api_secret:
+ type: "string"
+ description: "Mixpanel API Secret. See the docs for more information on how to obtain this key."
+ airbyte_secret: true
+ attribution_window:
+ type: "integer"
+ description: "Latency minimum number of days to look-back to account for\
+ \ delays in attributing accurate results. Default attribution window is\
+ \ 5 days."
+ default: 5
+ date_window_size:
+ type: "integer"
+ description: "Number of days for date window looping through transactional\
+ \ endpoints with from_date and to_date. Default date_window_size is 30\
+ \ days. Clients with large volumes of events may want to decrease this\
+ \ to 14, 7, or even down to 1-2 days."
+ default: 30
+ project_timezone:
+ type: "string"
+ description: "Time zone in which integer date times are stored. The project\
+ \ timezone may be found in the project settings in the Mixpanel console."
+ default: "US/Pacific"
+ examples:
+ - "US/Pacific"
+ - "UTC"
+ select_properties_by_default:
+ type: "boolean"
+ description: "Setting this config parameter to true ensures that new properties\
+ \ on events and engage records are captured. Otherwise new properties\
+ \ will be ignored"
+ default: true
+ start_date:
+ type: "string"
+ description: "The default value to use if no bookmark exists for an endpoint.\
+ \ Default is 1 year ago."
+ examples:
+ - "2021-11-16"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$"
+ region:
+ type: "string"
+ enum:
+ - "US"
+ - "EU"
+ default: "US"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-monday:0.1.0"
+ spec:
+ documentationUrl: "https://docsurl.com"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Monday Spec"
+ type: "object"
+ required:
+ - "api_token"
+ additionalProperties: false
+ properties:
+ api_token:
+ type: "string"
+ description: "This is the API token to authenticate requests to Monday.\
+ \ Profile picture (bottom left) => Admin => API"
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-mongodb-v2:0.1.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/mongodb-v2"
+ changelogUrl: "https://docs.airbyte.io/integrations/sources/mongodb-v2"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "MongoDb Source Spec"
+ type: "object"
+ required:
+ - "database"
+ additionalProperties: true
+ properties:
+ instance_type:
+ type: "object"
+ title: "MongoDb instance type"
+ description: "MongoDb instance to connect to. For MongoDB Atlas and Replica\
+ \ Set TLS connection is used by default."
+ order: 0
+ oneOf:
+ - title: "Standalone MongoDb Instance"
+ required:
+ - "instance"
+ - "host"
+ - "port"
+ properties:
+ instance:
+ type: "string"
+ enum:
+ - "standalone"
+ default: "standalone"
+ host:
+ title: "Host"
+ type: "string"
+ description: "Host of a Mongo database to be replicated."
+ order: 0
+ port:
+ title: "Port"
+ type: "integer"
+ description: "Port of a Mongo database to be replicated."
+ minimum: 0
+ maximum: 65536
+ default: 27017
+ examples:
+ - "27017"
+ order: 1
+ tls:
+ title: "TLS connection"
+ type: "boolean"
+ description: "Indicates whether TLS encryption protocol will be used\
+ \ to connect to MongoDB. It is recommended to use TLS connection\
+ \ if possible. For more information see documentation."
+ default: false
+ order: 2
+ - title: "Replica Set"
+ required:
+ - "instance"
+ - "server_addresses"
+ properties:
+ instance:
+ type: "string"
+ enum:
+ - "replica"
+ default: "replica"
+ server_addresses:
+ title: "Server addresses"
+ type: "string"
+ description: "The members of a replica set. Please specify `host`:`port`\
+ \ of each member seperated by comma."
+ examples:
+ - "host1:27017,host2:27017,host3:27017"
+ order: 0
+ replica_set:
+ title: "Replica Set"
+ type: "string"
+ description: "A replica set name."
+ order: 1
+ - title: "MongoDB Atlas"
+ additionalProperties: false
+ required:
+ - "instance"
+ - "cluster_url"
+ properties:
+ instance:
+ type: "string"
+ enum:
+ - "atlas"
+ default: "atlas"
+ cluster_url:
+ title: "Cluster URL"
+ type: "string"
+ description: "URL of a cluster to connect to."
+ order: 0
+ database:
+ title: "Database name"
+ type: "string"
+ description: "Database to be replicated."
+ order: 1
+ user:
+ title: "User"
+ type: "string"
+ description: "User"
+ order: 2
+ password:
+ title: "Password"
+ type: "string"
+ description: "Password"
+ airbyte_secret: true
+ order: 3
+ auth_source:
+ title: "Authentication source"
+ type: "string"
+ description: "Authentication source where user information is stored"
+ default: "admin"
+ examples:
+ - "admin"
+ order: 4
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
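+ # Illustrative sketch (placeholder values): the MongoDB spec above is a oneOf,
+ # so instance_type carries an "instance" discriminator. A hypothetical Atlas
+ # config could look like:
+ #   {
+ #     "instance_type": { "instance": "atlas", "cluster_url": "<cluster-url>" },
+ #     "database": "my_database",
+ #     "user": "airbyte_user",
+ #     "password": "<password>",
+ #     "auth_source": "admin"
+ #   }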
+- dockerImage: "airbyte/source-mysql:0.4.9"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/mysql"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "MySql Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "database"
+ - "username"
+ - "replication_method"
+ additionalProperties: false
+ properties:
+ host:
+ description: "Hostname of the database."
+ type: "string"
+ order: 0
+ port:
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 3306
+ examples:
+ - "3306"
+ order: 1
+ database:
+ description: "Name of the database."
+ type: "string"
+ order: 2
+ username:
+ description: "Username to use to access the database."
+ type: "string"
+ order: 3
+ password:
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ jdbc_url_params:
+ description: "Additional properties to pass to the jdbc url string when\
+ \ connecting to the database formatted as 'key=value' pairs separated\
+ \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)"
+ type: "string"
+ order: 5
+ ssl:
+ title: "SSL Connection"
+ description: "Encrypt data using SSL."
+ type: "boolean"
+ default: true
+ order: 6
+ replication_method:
+ type: "string"
+ title: "Replication Method"
+ description: "Replication method to use for extracting data from the database.\
+ \ STANDARD replication requires no setup on the DB side but will not be\
+ \ able to represent deletions incrementally. CDC uses the Binlog to detect\
+ \ inserts, updates, and deletes. This needs to be configured on the source\
+ \ database itself."
+ order: 7
+ default: "STANDARD"
+ enum:
+ - "STANDARD"
+ - "CDC"
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
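+ # Hypothetical MySQL config sketch (placeholder values). Note that
+ # replication_method is a plain enum string here, while tunnel_method is an
+ # object discriminated by its inner "tunnel_method" field:
+ #   {
+ #     "host": "db.example.com",
+ #     "port": 3306,
+ #     "database": "sales",
+ #     "username": "airbyte",
+ #     "password": "<password>",
+ #     "ssl": true,
+ #     "replication_method": "STANDARD",
+ #     "tunnel_method": { "tunnel_method": "NO_TUNNEL" }
+ #   }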
+- dockerImage: "airbyte/source-okta:0.1.4"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/okta"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Okta Spec"
+ type: "object"
+ required:
+ - "token"
+ - "base_url"
+ additionalProperties: false
+ properties:
+ token:
+ type: "string"
+ title: "API Token"
+ description: "A Okta token. See the docs for instructions on how to generate it."
+ airbyte_secret: true
+ base_url:
+ type: "string"
+ title: "Base URL"
+ description: "The Okta base URL."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-onesignal:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/onesignal"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "OneSignal Source Spec"
+ type: "object"
+ required:
+ - "user_auth_key"
+ - "start_date"
+ - "outcome_names"
+ additionalProperties: false
+ properties:
+ user_auth_key:
+ type: "string"
+ description: "OneSignal User Auth Key, see the docs for more information on how to obtain this key."
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ description: "The date from which you'd like to replicate data for OneSignal\
+ \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\
+ \ date will be replicated."
+ examples:
+ - "2020-11-16T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ outcome_names:
+ type: "string"
+ description: "Comma-separated list of names and the value (sum/count) for\
+ \ the returned outcome data. See the docs for more details"
+ examples:
+ - "os__session_duration.count,os__click.count,CustomOutcomeName.sum"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-oracle:0.3.8"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/oracle"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Oracle Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "sid"
+ - "username"
+ additionalProperties: false
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ port:
+ title: "Port"
+ description: "Port of the database.\nOracle Corporations recommends the\
+ \ following port numbers:\n1521 - Default listening port for client connections\
+ \ to the listener. \n2484 - Recommended and officially registered listening\
+ \ port for client connections to the listener using TCP/IP with SSL"
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 1521
+ sid:
+ title: "SID (Oracle System Identifier)"
+ type: "string"
+ username:
+ title: "User"
+ description: "Username to use to access the database."
+ type: "string"
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ schemas:
+ title: "Schemas"
+ description: "List of schemas to sync from. Defaults to user. Case sensitive."
+ type: "array"
+ items:
+ type: "string"
+ minItems: 1
+ uniqueItems: true
+ encryption:
+ title: "Encryption"
+ type: "object"
+ description: "Encryption method to use when communicating with the database"
+ order: 6
+ oneOf:
+ - title: "Unencrypted"
+ additionalProperties: false
+ description: "Data transfer will not be encrypted."
+ required:
+ - "encryption_method"
+ properties:
+ encryption_method:
+ type: "string"
+ const: "unencrypted"
+ enum:
+ - "unencrypted"
+ default: "unencrypted"
+ - title: "Native Network Ecryption (NNE)"
+ additionalProperties: false
+ description: "Native network encryption gives you the ability to encrypt\
+ \ database connections, without the configuration overhead of TCP/IP\
+ \ and SSL/TLS and without the need to open and listen on different ports."
+ required:
+ - "encryption_method"
+ properties:
+ encryption_method:
+ type: "string"
+ const: "client_nne"
+ enum:
+ - "client_nne"
+ default: "client_nne"
+ encryption_algorithm:
+ type: "string"
+ description: "This parameter defines the encryption algorithm to be\
+ \ used"
+ title: "Encryption Algorithm"
+ default: "AES256"
+ enum:
+ - "AES256"
+ - "RC4_56"
+ - "3DES168"
+ - title: "TLS Encrypted (verify certificate)"
+ additionalProperties: false
+ description: "Verify and use the cert provided by the server."
+ required:
+ - "encryption_method"
+ - "ssl_certificate"
+ properties:
+ encryption_method:
+ type: "string"
+ const: "encrypted_verify_certificate"
+ enum:
+ - "encrypted_verify_certificate"
+ default: "encrypted_verify_certificate"
+ ssl_certificate:
+ title: "SSL PEM file"
+ description: "Privacy Enhanced Mail (PEM) files are concatenated certificate\
+ \ containers frequently used in certificate installations"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
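+ # Illustrative only (placeholder values): a hypothetical Oracle config using
+ # the Native Network Encryption branch of the encryption oneOf above:
+ #   {
+ #     "host": "oracle.example.com",
+ #     "port": 1521,
+ #     "sid": "ORCL",
+ #     "username": "airbyte",
+ #     "password": "<password>",
+ #     "encryption": { "encryption_method": "client_nne", "encryption_algorithm": "AES256" }
+ #   }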
+- dockerImage: "airbyte/source-paypal-transaction:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/paypal-transactions"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Paypal Transaction Search"
+ type: "object"
+ required:
+ - "client_id"
+ - "secret"
+ - "start_date"
+ - "is_sandbox"
+ additionalProperties: true
+ properties:
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Paypal Client ID for API credentials"
+ secret:
+ title: "Secret"
+ type: "string"
+ description: "The Secret for a given Client ID."
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ title: "Start Date"
+ description: "Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before\
+ \ present time"
+ examples:
+ - "2021-06-11T23:59:59-00:00"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}[+-][0-9]{2}:[0-9]{2}$"
+ is_sandbox:
+ title: "Is Sandbox"
+ description: "Whether or not to Sandbox or Production environment to extract\
+ \ data from"
+ type: "boolean"
+ default: false
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-paystack:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/paystack"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Paystack Source Spec"
+ type: "object"
+ required:
+ - "secret_key"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ secret_key:
+ type: "string"
+ pattern: "^(s|r)k_(live|test)_[a-zA-Z0-9]+$"
+ description: "Paystack API key (usually starts with 'sk_live_'; find yours\
+ \ here)."
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ examples:
+ - "2017-01-25T00:00:00Z"
+ lookback_window_days:
+ type: "integer"
+ title: "Lookback Window (in days)"
+ default: 0
+ minimum: 0
+ description: "When set, the connector will always reload data from the past\
+ \ N days, where N is the value set here. This is useful if your data is\
+ \ updated after creation."
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-pipedrive:0.1.6"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/pipedrive"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Pipedrive Spec"
+ type: "object"
+ required:
+ - "replication_start_date"
+ additionalProperties: true
+ properties:
+ authorization:
+ type: "object"
+ title: "Authentication Type"
+ oneOf:
+ - title: "Sign in via Pipedrive (OAuth)"
+ type: "object"
+ required:
+ - "auth_type"
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ properties:
+ auth_type:
+ type: "string"
+ const: "Client"
+ enum:
+ - "Client"
+ default: "Client"
+ order: 0
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Client ID of your developer application"
+ airbyte_secret: true
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The client secret of your developer application"
+ airbyte_secret: true
+ access_token:
+ title: "Access Token"
+ type: "string"
+ description: "An access token generated using the above client ID\
+ \ and secret"
+ airbyte_secret: true
+ refresh_token:
+ title: "Refresh Token"
+ type: "string"
+ description: "A refresh token generated using the above client ID\
+ \ and secret"
+ airbyte_secret: true
+ - type: "object"
+ title: "API Key Authentication"
+ required:
+ - "auth_type"
+ - "api_token"
+ properties:
+ auth_type:
+ type: "string"
+ const: "Token"
+ enum:
+ - "Token"
+ default: "Token"
+ order: 0
+ api_token:
+ title: "API Token"
+ type: "string"
+ description: "Pipedrive API Token"
+ airbyte_secret: true
+ replication_start_date:
+ title: "Replication Start Date"
+ description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated. When specified and not\
+ \ None, then stream will behave as incremental"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2017-01-25T00:00:00Z"
+ type: "string"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "append"
+- dockerImage: "airbyte/source-plaid:0.2.1"
+ spec:
+ documentationUrl: "https://plaid.com/docs/api/"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ type: "object"
+ required:
+ - "access_token"
+ - "api_key"
+ - "client_id"
+ additionalProperties: false
+ properties:
+ access_token:
+ type: "string"
+ title: "Access Token"
+ description: "The end-user's Link access token."
+ api_key:
+ title: "API Key"
+ type: "string"
+ description: "The Plaid API key to use to hit the API."
+ airbyte_secret: true
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Plaid client id"
+ plaid_env:
+ title: "Plaid Environment"
+ type: "string"
+ enum:
+ - "sandbox"
+ - "development"
+ - "production"
+ description: "The Plaid environment"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-pokeapi:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/pokeapi"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Pokeapi Spec"
+ type: "object"
+ required:
+ - "pokemon_name"
+ additionalProperties: false
+ properties:
+ pokemon_name:
+ type: "string"
+ description: "Pokemon requested from the API."
+ pattern: "^[a-z0-9_\\-]+$"
+ examples:
+ - "ditto, luxray, snorlax"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-posthog:0.1.4"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/posthog"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "PostHog Spec"
+ type: "object"
+ required:
+ - "api_key"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ start_date:
+ title: "Start Date"
+ type: "string"
+ description: "The date from which you'd like to replicate the data"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2021-01-01T00:00:00Z"
+ api_key:
+ type: "string"
+ airbyte_secret: true
+ description: "API Key. See the docs for information on how to generate this key."
+ base_url:
+ type: "string"
+ default: "https://app.posthog.com"
+ description: "Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com)."
+ examples:
+ - "https://posthog.example.com"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-postgres:0.3.13"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/postgres"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Postgres Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "database"
+ - "username"
+ additionalProperties: false
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ order: 0
+ port:
+ title: "Port"
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 5432
+ examples:
+ - "5432"
+ order: 1
+ database:
+ title: "DB Name"
+ description: "Name of the database."
+ type: "string"
+ order: 2
+ username:
+ title: "User"
+ description: "Username to use to access the database."
+ type: "string"
+ order: 3
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ ssl:
+ title: "Connect using SSL"
+ description: "Encrypt client/server communications for increased security."
+ type: "boolean"
+ default: false
+ order: 5
+ replication_method:
+ type: "object"
+ title: "Replication Method"
+ description: "Replication method to use for extracting data from the database."
+ order: 6
+ oneOf:
+ - title: "Standard"
+ additionalProperties: false
+ description: "Standard replication requires no setup on the DB side but\
+ \ will not be able to represent deletions incrementally."
+ required:
+ - "method"
+ properties:
+ method:
+ type: "string"
+ const: "Standard"
+ enum:
+ - "Standard"
+ default: "Standard"
+ order: 0
+ - title: "Logical Replication (CDC)"
+ additionalProperties: false
+ description: "Logical replication uses the Postgres write-ahead log (WAL)\
+ \ to detect inserts, updates, and deletes. This needs to be configured\
+ \ on the source database itself. Only available on Postgres 10 and above.\
+ \ Read the Postgres Source docs for more information."
+ required:
+ - "method"
+ - "replication_slot"
+ - "publication"
+ properties:
+ method:
+ type: "string"
+ const: "CDC"
+ enum:
+ - "CDC"
+ default: "CDC"
+ order: 0
+ plugin:
+ type: "string"
+ description: "A logical decoding plug-in installed on the PostgreSQL\
+ \ server. `pgoutput` plug-in is used by default.\nIf replication\
+ \ table contains a lot of big jsonb values it is recommended to\
+ \ use `wal2json` plug-in. For more information about `wal2json`\
+ \ plug-in read Postgres Source docs."
+ enum:
+ - "pgoutput"
+ - "wal2json"
+ default: "pgoutput"
+ order: 1
+ replication_slot:
+ type: "string"
+ description: "A plug-in logical replication slot."
+ order: 2
+ publication:
+ type: "string"
+ description: "A Postgres publication used for consuming changes."
+ order: 3
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
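+ # Hypothetical Postgres config sketch (placeholder values) using the Logical
+ # Replication (CDC) branch, which requires a replication slot and publication
+ # to exist on the source database:
+ #   {
+ #     "host": "pg.example.com",
+ #     "port": 5432,
+ #     "database": "appdb",
+ #     "username": "airbyte",
+ #     "password": "<password>",
+ #     "replication_method": { "method": "CDC", "plugin": "pgoutput",
+ #       "replication_slot": "airbyte_slot", "publication": "airbyte_publication" },
+ #     "tunnel_method": { "tunnel_method": "NO_TUNNEL" }
+ #   }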
+- dockerImage: "airbyte/source-prestashop:0.1.0"
+ spec:
+ documentationUrl: "https://docsurl.com"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "PrestaShop Spec"
+ type: "object"
+ required:
+ - "url"
+ - "access_key"
+ additionalProperties: false
+ properties:
+ url:
+ type: "string"
+ description: "Shop URL without trailing slash (domain name or IP address)"
+ access_key:
+ type: "string"
+ description: "Your PrestaShop access key. See the docs for info on how to obtain this."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-quickbooks-singer:0.1.3"
+ spec:
+ documentationUrl: "https://docsurl.com"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Source Quickbooks Singer Spec"
+ type: "object"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ - "realm_id"
+ - "user_agent"
+ - "start_date"
+ - "sandbox"
+ additionalProperties: false
+ properties:
+ client_id:
+ type: "string"
+ description: "Identifies which app is making the request. Obtain this value\
+ \ from the Keys tab on the app profile via My Apps on the developer site.\
+ \ There are two versions of this key: development and production"
+ client_secret:
+ description: " Obtain this value from the Keys tab on the app profile via\
+ \ My Apps on the developer site. There are two versions of this key: development\
+ \ and production"
+ type: "string"
+ airbyte_secret: true
+ refresh_token:
+ description: "A token used when refreshing the access token."
+ type: "string"
+ airbyte_secret: true
+ realm_id:
+ description: "Labeled Company ID. The Make API Calls panel is populated\
+ \ with the realm id and the current access token"
+ type: "string"
+ airbyte_secret: true
+ user_agent:
+ type: "string"
+ description: "Process and email for API logging purposes. Example: tap-quickbooks\
+ \ "
+ start_date:
+ description: "The default value to use if no bookmark exists for an endpoint\
+ \ (rfc3339 date string) E.g, 2021-03-20T00:00:00Z"
+ type: "string"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2021-03-20T00:00:00Z"
+ sandbox:
+ description: "Development or Production."
+ type: "boolean"
+ default: false
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-recharge:0.1.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/recharge"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Recharge Spec"
+ type: "object"
+ required:
+ - "start_date"
+ - "access_token"
+ additionalProperties: false
+ properties:
+ start_date:
+ type: "string"
+ description: "The date from which you'd like to replicate data for Recharge\
+ \ API, in the format YYYY-MM-DDT00:00:00Z."
+ examples:
+ - "2021-05-14T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ access_token:
+ type: "string"
+ description: "The value of the Access Token generated. See the docs for more\
+ \ information"
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-recurly:0.2.4"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/recurly"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Recurly Source Spec"
+ type: "object"
+ required:
+ - "api_key"
+ additionalProperties: false
+ properties:
+ api_key:
+ type: "string"
+ description: "Recurly API Key. See the docs for more information on how to generate this key."
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-redshift:0.3.4"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/redshift"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Redshift Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "database"
+ - "username"
+ - "password"
+ additionalProperties: false
+ properties:
+ host:
+ description: "Host Endpoint of the Redshift Cluster (must include the cluster-id,\
+ \ region and end with .redshift.amazonaws.com)"
+ type: "string"
+ port:
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 5439
+ examples:
+ - "5439"
+ database:
+ description: "Name of the database."
+ type: "string"
+ examples:
+ - "master"
+ username:
+ description: "Username to use to access the database."
+ type: "string"
+ password:
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-retently:0.1.0"
+ spec:
+ documentationUrl: "https://docsurl.com"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Retently Api Spec"
+ type: "object"
+ required:
+ - "api_key"
+ additionalProperties: false
+ properties:
+ api_key:
+ type: "string"
+ description: "API key from https://app.retently.com/settings/api/tokens"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-s3:0.1.6"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/s3"
+ changelogUrl: "https://docs.airbyte.io/integrations/sources/s3"
+ connectionSpecification:
+ title: "S3 Source Spec"
+ type: "object"
+ properties:
+ dataset:
+ title: "Dataset"
+ description: "This source creates one table per connection, this field is\
+ \ the name of that table. This should include only letters, numbers, dash\
+ \ and underscores. Note that this may be altered according to destination."
+ pattern: "^([A-Za-z0-9-_]+)$"
+ type: "string"
+ path_pattern:
+ title: "Path Pattern"
+ description: "Add at least 1 pattern here to match filepaths against. Use\
+ \ | to separate multiple patterns. Airbyte uses these patterns to determine\
+ \ which files to pick up from the provider storage. See wcmatch.glob to understand pattern syntax (GLOBSTAR\
+ \ and SPLIT flags are enabled). Use pattern ** to pick\
+ \ up all files."
+ examples:
+ - "**"
+ - "myFolder/myTableFiles/*.csv|myFolder/myOtherTableFiles/*.csv"
+ type: "string"
+ schema:
+ title: "Schema"
+ description: "Optionally provide a schema to enforce, as a valid JSON string.\
+ \ Ensure this is a mapping of { \"column\" : \"type\" },\
+ \ where types are valid JSON Schema datatypes. Leave as {} to auto-infer\
+ \ the schema."
+ default: "{}"
+ examples:
+ - "{\"column_1\": \"number\", \"column_2\": \"string\", \"column_3\": \"\
+ array\", \"column_4\": \"object\", \"column_5\": \"boolean\"}"
+ type: "string"
+ format:
+ title: "Format"
+ default: "csv"
+ type: "object"
+ oneOf:
+ - title: "csv"
+ description: "This connector utilises PyArrow (Apache Arrow) for CSV parsing."
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ const: "csv"
+ type: "string"
+ delimiter:
+ title: "Delimiter"
+ description: "The character delimiting individual cells in the CSV\
+ \ data. This may only be a 1-character string."
+ default: ","
+ minLength: 1
+ type: "string"
+ quote_char:
+ title: "Quote Char"
+ description: "The character used optionally for quoting CSV values.\
+ \ To disallow quoting, make this field blank."
+ default: "\""
+ type: "string"
+ escape_char:
+ title: "Escape Char"
+ description: "The character used optionally for escaping special characters.\
+ \ To disallow escaping, leave this field blank."
+ type: "string"
+ encoding:
+ title: "Encoding"
+ description: "The character encoding of the CSV data. Leave blank\
+ \ to default to UTF-8. See list of python encodings for allowable options."
+ type: "string"
+ double_quote:
+ title: "Double Quote"
+ description: "Whether two quotes in a quoted CSV value denote a single\
+ \ quote in the data."
+ default: true
+ type: "boolean"
+ newlines_in_values:
+ title: "Newlines In Values"
+ description: "Whether newline characters are allowed in CSV values.\
+ \ Turning this on may affect performance. Leave blank to default\
+ \ to False."
+ default: false
+ type: "boolean"
+ block_size:
+ title: "Block Size"
+ description: "The chunk size in bytes to process at a time in memory\
+ \ from each file. If your data is particularly wide and failing\
+ \ during schema detection, increasing this should solve it. Beware\
+ \ of raising this too high as you could hit OOM errors."
+ default: 10000
+ type: "integer"
+ additional_reader_options:
+ title: "Additional Reader Options"
+ description: "Optionally add a valid JSON string here to provide additional\
+ \ options to the csv reader. Mappings must correspond to options\
+ \ detailed here. 'column_types' is used internally\
+ \ to handle schema so overriding that would likely cause problems."
+ default: "{}"
+ examples:
+ - "{\"timestamp_parsers\": [\"%m/%d/%Y %H:%M\", \"%Y/%m/%d %H:%M\"\
+ ], \"strings_can_be_null\": true, \"null_values\": [\"NA\", \"NULL\"\
+ ]}"
+ type: "string"
+ advanced_options:
+ title: "Advanced Options"
+ description: "Optionally add a valid JSON string here to provide additional\
+ \ Pyarrow ReadOptions. Specify 'column_names'\
+ \ here if your CSV doesn't have header, or if you want to use custom\
+ \ column names. 'block_size' and 'encoding' are already used above,\
+ \ specify them again here will override the values above."
+ default: "{}"
+ examples:
+ - "{\"column_names\": [\"column1\", \"column2\"]}"
+ type: "string"
+ - title: "parquet"
+ description: "This connector utilises PyArrow (Apache Arrow) for Parquet parsing."
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ const: "parquet"
+ type: "string"
+ buffer_size:
+ title: "Buffer Size"
+ description: "Perform read buffering when deserializing individual\
+ \ column chunks. By default every group column will be loaded fully\
+ \ to memory. This option can help to optimize a work with memory\
+ \ if your data is particularly wide or failing during detection\
+ \ of OOM errors."
+ default: 0
+ type: "integer"
+ columns:
+ title: "Columns"
+ description: "If you only want to sync a subset of the columns from\
+ \ the file(s), add the columns you want here. Leave it empty to\
+ \ sync all columns."
+ type: "array"
+ items:
+ type: "string"
+ batch_size:
+ title: "Batch Size"
+ description: "Maximum number of records per batch. Batches may be\
+ \ smaller if there aren’t enough rows in the file. This option can\
+ \ help to optimize a work with memory if your data is particularly\
+ \ wide or failing during detection of OOM errors."
+ default: 65536
+ type: "integer"
+ provider:
+ title: "S3: Amazon Web Services"
+ type: "object"
+ properties:
+ bucket:
+ title: "Bucket"
+ description: "Name of the S3 bucket where the file(s) exist."
+ type: "string"
+ aws_access_key_id:
+ title: "Aws Access Key Id"
+ description: "In order to access private Buckets stored on AWS S3, this\
+ \ connector requires credentials with the proper permissions. If accessing\
+ \ publicly available data, this field is not necessary."
+ airbyte_secret: true
+ type: "string"
+ aws_secret_access_key:
+ title: "Aws Secret Access Key"
+ description: "In order to access private Buckets stored on AWS S3, this\
+ \ connector requires credentials with the proper permissions. If accessing\
+ \ publicly available data, this field is not necessary."
+ airbyte_secret: true
+ type: "string"
+ path_prefix:
+ title: "Path Prefix"
+ description: "By providing a path-like prefix (e.g. myFolder/thisTable/)\
+ \ under which all the relevant files sit, we can optimise finding\
+ \ these in S3. This is optional but recommended if your bucket contains\
+ \ many folders/files."
+ default: ""
+ type: "string"
+ endpoint:
+ title: "Endpoint"
+ description: "Endpoint to an S3 compatible service. Leave empty to use\
+ \ AWS."
+ default: ""
+ type: "string"
+ use_ssl:
+ title: "Use Ssl"
+ description: "Is remote server using secure SSL/TLS connection"
+ type: "boolean"
+ verify_ssl_cert:
+ title: "Verify Ssl Cert"
+ description: "Allow self signed certificates"
+ type: "boolean"
+ required:
+ - "bucket"
+ required:
+ - "dataset"
+ - "path_pattern"
+ - "provider"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+ - "append_dedup"
+- dockerImage: "airbyte/source-salesloft:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/salesloft"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Source Salesloft Spec"
+ type: "object"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ client_id:
+ type: "string"
+ description: "Salesloft client id."
+ client_secret:
+ type: "string"
+ description: "Salesloft client secret."
+ airbyte_secret: true
+ refresh_token:
+ type: "string"
+ description: "Salesloft refresh token."
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ description: "The date from which you'd like to replicate data for Salesloft\
+ \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\
+ \ date will be replicated."
+ examples:
+ - "2020-11-16T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-salesforce:0.1.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/salesforce"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Salesforce Source Spec"
+ type: "object"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ - "start_date"
+ - "api_type"
+ additionalProperties: false
+ properties:
+ client_id:
+ description: "The Consumer Key that can be found when viewing your app in\
+ \ Salesforce"
+ type: "string"
+ client_secret:
+ description: "The Consumer Secret that can be found when viewing your app\
+ \ in Salesforce"
+ type: "string"
+ airbyte_secret: true
+ refresh_token:
+ description: "Salesforce Refresh Token used for Airbyte to access your Salesforce\
+ \ account. If you don't know what this is, follow this guide to retrieve it."
+ type: "string"
+ airbyte_secret: true
+ start_date:
+ description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated. Priority for filtering\
+ \ by `updated` fields, and only then by `created` fields if they are available\
+ \ for stream."
+ type: "string"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2021-07-25T00:00:00Z"
+ is_sandbox:
+ description: "Whether or not the the app is in a Salesforce sandbox. If\
+ \ you do not know what this, assume it is false. We provide more info\
+ \ on this field in the docs."
+ type: "boolean"
+ default: false
+ api_type:
+ description: "Unless you know that you are transferring a very small amount\
+ \ of data, prefer using the BULK API. This will help avoid using up all\
+ \ of your API call quota with Salesforce. Valid values are BULK or REST."
+ type: "string"
+ enum:
+ - "BULK"
+ - "REST"
+ default: "BULK"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject: []
+ oauthFlowInitParameters:
+ - - "client_id"
+ - - "client_secret"
+ oauthFlowOutputParameters:
+ - - "refresh_token"
+- dockerImage: "airbyte/source-sendgrid:0.2.6"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/sendgrid"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Sendgrid Spec"
+ type: "object"
+ required:
+ - "apikey"
+ additionalProperties: false
+ properties:
+ apikey:
+ type: "string"
+ description: "API Key, use admin to generate this key."
+ start_time:
+ type: "integer"
+ description: "Start time in timestamp integer format. Any data before this\
+ \ timestamp will not be replicated."
+ examples:
+ - 1558359837
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-shopify:0.1.22"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/shopify"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Shopify Source CDK Specifications"
+ type: "object"
+ required:
+ - "shop"
+ - "start_date"
+ - "auth_method"
+ additionalProperties: false
+ properties:
+ shop:
+ type: "string"
+ description: "The name of the shopify store. For https://EXAMPLE.myshopify.com,\
+ \ the shop name is 'EXAMPLE'."
+ start_date:
+ type: "string"
+ description: "The date you would like to replicate data. Format: YYYY-MM-DD."
+ examples:
+ - "2021-01-01"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ auth_method:
+ title: "Shopify Authorization Method"
+ type: "object"
+ oneOf:
+ - type: "object"
+ title: "OAuth2.0"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "access_token"
+ properties:
+ auth_method:
+ type: "string"
+ const: "access_token"
+ enum:
+ - "access_token"
+ default: "access_token"
+ order: 0
+ client_id:
+ type: "string"
+ description: "The API Key of the Shopify developer application."
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ description: "The API Secret the Shopify developer application."
+ airbyte_secret: true
+ access_token:
+ type: "string"
+ description: "Access Token for making authenticated requests."
+ airbyte_secret: true
+ - title: "API Password"
+ type: "object"
+ required:
+ - "api_password"
+ properties:
+ auth_method:
+ type: "string"
+ const: "api_password"
+ enum:
+ - "api_password"
+ default: "api_password"
+ order: 0
+ api_password:
+ type: "string"
+ description: "The API PASSWORD for your private application in `Shopify`\
+ \ shop."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject:
+ - "auth_method"
+ - "0"
+ oauthFlowInitParameters:
+ - - "client_id"
+ - - "client_secret"
+ oauthFlowOutputParameters:
+ - - "access_token"
+- dockerImage: "airbyte/source-shortio:0.1.0"
+ spec:
+ documentationUrl: "https://developers.short.io/reference"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Shortio Spec"
+ type: "object"
+ required:
+ - "domain_id"
+ - "secret_key"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ domain_id:
+ type: "string"
+ description: "Domain ID"
+ airbyte_secret: false
+ secret_key:
+ type: "string"
+ description: "Short.io Secret key"
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ description: "Start Date, YYYY-MM-DD"
+ airbyte_secret: false
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-slack:0.1.12"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/slack"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Slack Spec"
+ type: "object"
+ required:
+ - "start_date"
+ - "lookback_window"
+ - "join_channels"
+ additionalProperties: true
+ properties:
+ start_date:
+ type: "string"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ examples:
+ - "2017-01-25T00:00:00Z"
+ title: "Start Date"
+ lookback_window:
+ type: "integer"
+ title: "Threads Lookback window (Days)"
+ description: "How far into the past to look for messages in threads."
+ examples:
+ - 7
+ - 14
+ join_channels:
+ type: "boolean"
+ default: true
+ title: "Join all channels"
+ description: "Whether to join all channels or to sync data only from channels\
+ \ the bot is already in. If false, you'll need to manually add the bot\
+ \ to all the channels from which you'd like to sync messages. "
+ credentials:
+ title: "Authentication mechanism"
+ description: "Choose how to authenticate into Slack"
+ type: "object"
+ oneOf:
+ - type: "object"
+ title: "Sign in via Slack (OAuth)"
+ required:
+ - "access_token"
+ - "client_id"
+ - "client_secret"
+ - "option_title"
+ properties:
+ option_title:
+ type: "string"
+ const: "Default OAuth2.0 authorization"
+ client_id:
+ title: "Client ID"
+ description: "Slack client_id. See our docs if you need help finding this id."
+ type: "string"
+ examples:
+ - "slack-client-id-example"
+ client_secret:
+ title: "Client Secret"
+ description: "Slack client_secret. See our docs if you need help finding this secret."
+ type: "string"
+ examples:
+ - "slack-client-secret-example"
+ airbyte_secret: true
+ access_token:
+ title: "Access token"
+ description: "Slack access_token. See our docs if you need help generating the token."
+ type: "string"
+ examples:
+ - "slack-access-token-example"
+ airbyte_secret: true
+ refresh_token:
+ title: "Refresh token"
+ description: "Slack refresh_token. See our docs if you need help generating the token."
+ type: "string"
+ examples:
+ - "slack-refresh-token-example"
+ airbyte_secret: true
+ order: 0
+ - type: "object"
+ title: "API Token"
+ required:
+ - "api_token"
+ - "option_title"
+ properties:
+ option_title:
+ type: "string"
+ const: "API Token Credentials"
+ api_token:
+ type: "string"
+ title: "API Token"
+ description: "A Slack bot token. See the docs for instructions on how to generate it."
+ airbyte_secret: true
+ order: 1
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject:
+ - "credentials"
+ - "0"
+ oauthFlowInitParameters:
+ - - "client_id"
+ - - "client_secret"
+ oauthFlowOutputParameters:
+ - - "access_token"
+ - - "refresh_token"
+- dockerImage: "airbyte/source-smartsheets:0.1.5"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/smartsheets"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Smartsheets Source Spec"
+ type: "object"
+ required:
+ - "access_token"
+ - "spreadsheet_id"
+ additionalProperties: false
+ properties:
+ access_token:
+ title: "API Access token"
+ description: "Found in Profile > Apps & Integrations > API Access within\
+ \ Smartsheet app"
+ type: "string"
+ airbyte_secret: true
+ spreadsheet_id:
+ title: "Smartsheet ID"
+ description: "Found in File > Properties"
+ type: "string"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-snapchat-marketing:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/snapchat-marketing"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Snapchat Marketing Spec"
+ type: "object"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ additionalProperties: false
+ properties:
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Snapchat Client ID for API credentials."
+ airbyte_secret: true
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The Client Secret for a given Client ID."
+ airbyte_secret: true
+ refresh_token:
+ title: "API Refresh Token"
+ type: "string"
+ description: "Refresh Token to get next api key after expiration. Is given\
+ \ with API Key"
+ airbyte_secret: true
+ start_date:
+ title: "Start Date"
+ type: "string"
+ description: "The start date to sync data. Leave blank for full sync. Format:\
+ \ YYYY-MM-DD."
+ examples:
+ - "2021-01-01"
+ default: "1970-01-01"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-snowflake:0.1.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/snowflake"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Snowflake Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "role"
+ - "warehouse"
+ - "database"
+ - "schema"
+ - "username"
+ - "password"
+ additionalProperties: false
+ properties:
+ host:
+ description: "Host domain of the snowflake instance (must include the account,\
+ \ region, cloud environment, and end with snowflakecomputing.com)."
+ examples:
+ - "accountname.us-east-2.aws.snowflakecomputing.com"
+ type: "string"
+ title: "Account name"
+ order: 0
+ role:
+ description: "The role you created for Airbyte to access Snowflake."
+ examples:
+ - "AIRBYTE_ROLE"
+ type: "string"
+ title: "Role"
+ order: 1
+ warehouse:
+ description: "The warehouse you created for Airbyte to access data into."
+ examples:
+ - "AIRBYTE_WAREHOUSE"
+ type: "string"
+ title: "Warehouse"
+ order: 2
+ database:
+ description: "The database you created for Airbyte to access data into."
+ examples:
+ - "AIRBYTE_DATABASE"
+ type: "string"
+ title: "Database"
+ order: 3
+ schema:
+ description: "The source Snowflake schema tables."
+ examples:
+ - "AIRBYTE_SCHEMA"
+ type: "string"
+ title: "Schema"
+ order: 4
+ username:
+ description: "The username you created to allow Airbyte to access the database."
+ examples:
+ - "AIRBYTE_USER"
+ type: "string"
+ title: "Username"
+ order: 5
+ password:
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ title: "Password"
+ order: 6
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-square:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/square"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Square Source CDK Specifications"
+ type: "object"
+ required:
+ - "api_key"
+ - "is_sandbox"
+ additionalProperties: false
+ properties:
+ api_key:
+ type: "string"
+ description: "The API key for a Square application"
+ airbyte_secret: true
+ is_sandbox:
+ type: "boolean"
+ description: "Determines the sandbox (true) or production (false) API version"
+ examples:
+ - true
+ - false
+ default: true
+ start_date:
+ type: "string"
+ description: "The start date to sync data. Leave blank for full sync. Format:\
+ \ YYYY-MM-DD."
+ examples:
+ - "2021-01-01"
+ default: "1970-01-01"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ include_deleted_objects:
+ type: "boolean"
+ description: "In some streams there is and option to include deleted objects\
+ \ (Items, Categories, Discounts, Taxes)"
+ examples:
+ - true
+ - false
+ default: false
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-strava:0.1.0"
+ spec:
+ documentationUrl: "https://docsurl.com"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Strava Spec"
+ type: "object"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ - "athlete_id"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ client_id:
+ type: "string"
+ description: "Strava Client ID"
+ pattern: "^[0-9_\\-]+$"
+ examples:
+ - "12345"
+ client_secret:
+ type: "string"
+ description: "Strava Client Secret"
+ pattern: "^[0-9a-fA-F]+$"
+ examples:
+ - "fc6243f283e51f6ca989aab298b17da125496f50"
+ airbyte_secret: true
+ refresh_token:
+ type: "string"
+ description: "Strava Refresh Token with activity:read_all permissions"
+ pattern: "^[0-9a-fA-F]+$"
+ examples:
+ - "fc6243f283e51f6ca989aab298b17da125496f50"
+ airbyte_secret: true
+ athlete_id:
+ type: "integer"
+ description: "Strava Athlete ID"
+ pattern: "^[0-9_\\-]+$"
+ examples:
+ - "17831421"
+ start_date:
+ type: "string"
+ description: "Start Query Timestamp in UTC"
+ examples:
+ - "2016-12-31 23:59:59"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-stripe:0.1.22"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/stripe"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Stripe Source Spec"
+ type: "object"
+ required:
+ - "client_secret"
+ - "account_id"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ client_secret:
+ type: "string"
+ pattern: "^(s|r)k_(live|test)_[a-zA-Z0-9]+$"
+ description: "Stripe API key (usually starts with 'sk_live_'; find yours\
+ \ here)."
+ airbyte_secret: true
+ account_id:
+ type: "string"
+ description: "Your Stripe account ID (starts with 'acct_', find yours here)."
+ start_date:
+ type: "string"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ examples:
+ - "2017-01-25T00:00:00Z"
+ lookback_window_days:
+ type: "integer"
+ title: "Lookback Window (in days)"
+ default: 0
+ minimum: 0
+ description: "When set, the connector will always reload data from the past\
+ \ N days, where N is the value set here. This is useful if your data is\
+ \ updated after creation."
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
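+ # Illustrative Stripe config sketch: placeholder values only, though the
+ # client_secret placeholder still follows the declared sk_/rk_ pattern:
+ #   {
+ #     "client_secret": "sk_test_abc123placeholder",
+ #     "account_id": "acct_1234567890",
+ #     "start_date": "2017-01-25T00:00:00Z",
+ #     "lookback_window_days": 0
+ #   }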
+- dockerImage: "airbyte/source-surveymonkey:0.1.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/surveymonkey"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "SurveyMonkey Spec"
+ type: "object"
+ required:
+ - "start_date"
+ additionalProperties: true
+ properties:
+ start_date:
+ title: "Start Date"
+ type: "string"
+ description: "The date from which you'd like to replicate the data"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z?$"
+ examples:
+ - "2021-01-01T00:00:00Z"
+ access_token:
+ title: "Access Token"
+ type: "string"
+ airbyte_secret: true
+ description: "API Token. See the docs for information on how to generate this key."
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject: []
+ oauthFlowInitParameters: []
+ oauthFlowOutputParameters:
+ - - "access_token"
+- dockerImage: "airbyte/source-tempo:0.2.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Tempo Spec"
+ type: "object"
+ required:
+ - "api_token"
+ additionalProperties: false
+ properties:
+ api_token:
+ type: "string"
+ description: "Tempo API Token."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-tiktok-marketing:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/tiktok-marketing"
+ changelogUrl: "https://docs.airbyte.io/integrations/sources/tiktok-marketing"
+ connectionSpecification:
+ title: "TikTok Marketing Source Spec"
+ type: "object"
+ properties:
+ environment:
+ title: "Environment"
+ default: "Production"
+ oneOf:
+ - title: "Production"
+ type: "object"
+ properties:
+ environment:
+ title: "Environment"
+ const: "prod"
+ type: "string"
+ app_id:
+ title: "App Id"
+ description: "The App id applied by the developer."
+ type: "string"
+ secret:
+ title: "Secret"
+ description: "The private key of the developer's application."
+ airbyte_secret: true
+ type: "string"
+ required:
+ - "app_id"
+ - "secret"
+ - title: "Sandbox"
+ type: "object"
+ properties:
+ environment:
+ title: "Environment"
+ const: "sandbox"
+ type: "string"
+ advertiser_id:
+ title: "Advertiser Id"
+ description: "The Advertiser ID which generated for the developer's\
+ \ Sandbox application."
+ type: "string"
+ required:
+ - "advertiser_id"
+ type: "object"
+ access_token:
+ title: "Access Token"
+ description: "Long-term Authorized Access Token."
+ airbyte_secret: true
+ type: "string"
+ start_date:
+ title: "Start Date"
+ description: "Start Date in format: YYYY-MM-DD."
+ default: "01-09-2016"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ type: "string"
+ required:
+ - "access_token"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+ - "append_dedup"
+- dockerImage: "airbyte/source-trello:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/trello"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Trello Spec"
+ type: "object"
+ required:
+ - "token"
+ - "key"
+ - "start_date"
+ additionalProperties: true
+ properties:
+ token:
+ type: "string"
+ title: "API token"
+ description: "A Trello token. See the docs for instructions on how to generate it."
+ airbyte_secret: true
+ key:
+ type: "string"
+ title: "API key"
+ description: "A Trello token. See the docs for instructions on how to generate it."
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ title: "Start date"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$"
+ description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ examples:
+ - "2021-03-01T00:00:00.000Z"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject: []
+ oauthFlowInitParameters: []
+ oauthFlowOutputParameters:
+ - - "token"
+ - - "key"
+- dockerImage: "airbyte/source-twilio:0.1.1"
+ spec:
+ documentationUrl: "https://hub.docker.com/r/airbyte/source-twilio"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Twilio Spec"
+ type: "object"
+ required:
+ - "account_sid"
+ - "auth_token"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ account_sid:
+ title: "Account ID"
+ description: "Twilio account SID"
+ airbyte_secret: true
+ type: "string"
+ auth_token:
+ title: "Auth Token"
+ description: "Twilio Auth Token."
+ airbyte_secret: true
+ type: "string"
+ start_date:
+ title: "Replication Start Date"
+ description: "UTC date and time in the format 2020-10-01T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2020-10-01T00:00:00Z"
+ type: "string"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "append"
+- dockerImage: "airbyte/source-typeform:0.1.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/typeform"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Typeform Spec"
+ type: "object"
+ required:
+ - "token"
+ - "start_date"
+ additionalProperties: true
+ properties:
+ start_date:
+ type: "string"
+ description: "The date you would like to replicate data. Format: YYYY-MM-DDTHH:mm:ss[Z]."
+ examples:
+ - "2020-01-01T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ token:
+ type: "string"
+ description: "The API Token for a Typeform account."
+ airbyte_secret: true
+ form_ids:
+ title: "Form IDs to replicate"
+ description: "When this parameter is set, the connector will replicate data\
+ \ only from the input forms. Otherwise, all forms in your Typeform account\
+ \ will be replicated. You can find form IDs in your form URLs. For example,\
+ \ in the URL \"https://mysite.typeform.com/to/u6nXL7\" the form_id is\
+ \ u6nXL7. You can find form URLs on Share panel"
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
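
As the form_ids description notes, the form ID is the trailing path segment of a share URL. A small illustrative helper (not part of the connector) that extracts it, using the spec's own example URL:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Illustrative helper: pulls the form_id out of a Typeform share URL.
public class TypeformFormIdExample {

  private static final Pattern FORM_URL = Pattern.compile("typeform\\.com/to/([A-Za-z0-9]+)");

  static String formIdFromUrl(final String url) {
    final Matcher m = FORM_URL.matcher(url);
    if (!m.find()) {
      throw new IllegalArgumentException("Not a Typeform share URL: " + url);
    }
    return m.group(1);
  }

  public static void main(final String[] args) {
    System.out.println(formIdFromUrl("https://mysite.typeform.com/to/u6nXL7")); // u6nXL7
  }
}
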
+- dockerImage: "airbyte/source-us-census:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/us-census"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "https://api.census.gov/ Source Spec"
+ type: "object"
+ required:
+ - "api_key"
+ - "query_path"
+ additionalProperties: false
+ properties:
+ query_params:
+ type: "string"
+ description: "The query parameters portion of the GET request, without the\
+ \ api key"
+ pattern: "^\\w+=[\\w,:*]+(&(?!key)\\w+=[\\w,:*]+)*$"
+ examples:
+ - "get=NAME,NAICS2017_LABEL,LFO_LABEL,EMPSZES_LABEL,ESTAB,PAYANN,PAYQTR1,EMP&for=us:*&NAICS2017=72&LFO=001&EMPSZES=001"
+ - "get=MOVEDIN,GEOID1,GEOID2,MOVEDOUT,FULL1_NAME,FULL2_NAME,MOVEDNET&for=county:*"
+ query_path:
+ type: "string"
+ description: "The path portion of the GET request"
+ pattern: "^data(\\/[\\w\\d]+)+$"
+ examples:
+ - "data/2019/cbp"
+ - "data/2018/acs"
+ - "data/timeseries/healthins/sahie"
+ api_key:
+ type: "string"
+ description: "Your API Key. Get your key here."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
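
The three fields above compose into a single GET request against api.census.gov (the host named in the spec's title): the path comes from query_path, the query string from query_params, and the key is appended last. A sketch with a placeholder key:

// Illustrative only: how the US Census fields compose into one request URL.
public class CensusUrlExample {

  static String buildUrl(final String queryPath, final String queryParams, final String apiKey) {
    // query_params deliberately excludes the key; it is appended separately
    return "https://api.census.gov/" + queryPath + "?" + queryParams + "&key=" + apiKey;
  }

  public static void main(final String[] args) {
    System.out.println(buildUrl(
        "data/2019/cbp",
        "get=NAME,NAICS2017_LABEL&for=us:*",
        "YOUR_API_KEY"));
  }
}
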
+- dockerImage: "airbyte/source-zendesk-chat:0.1.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/zendesk-chat"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Zendesk Chat Spec"
+ type: "object"
+ required:
+ - "start_date"
+ - "access_token"
+ additionalProperties: false
+ properties:
+ start_date:
+ type: "string"
+ description: "The date from which you'd like to replicate data for Zendesk\
+ \ Chat API, in the format YYYY-MM-DDT00:00:00Z."
+ examples:
+ - "2021-02-01T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ access_token:
+ type: "string"
+ description: "The value of the Access Token generated. See the docs for\
+ \ more information"
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-zendesk-sunshine:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/zendesk_sunshine"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Zendesk Sunshine Spec"
+ type: "object"
+ required:
+ - "api_token"
+ - "email"
+ - "start_date"
+ - "subdomain"
+ additionalProperties: false
+ properties:
+ api_token:
+ type: "string"
+ airbyte_secret: true
+ description: "API Token. See the docs for information on how to generate this key."
+ email:
+ type: "string"
+ description: "The user email for your Zendesk account"
+ subdomain:
+ type: "string"
+ description: "The subdomain for your Zendesk Account"
+ start_date:
+ title: "Start Date"
+ type: "string"
+ description: "The date from which you'd like to replicate the data"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples: "2021-01-01T00:00:00.000000Z"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-zendesk-support:0.1.4"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/zendesk-support"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Source Zendesk Support Spec"
+ type: "object"
+ required:
+ - "start_date"
+ - "subdomain"
+ - "auth_method"
+ additionalProperties: false
+ properties:
+ start_date:
+ type: "string"
+ description: "The date from which you'd like to replicate data for Zendesk\
+ \ Support API, in the format YYYY-MM-DDT00:00:00Z. All data generated\
+ \ after this date will be replicated."
+ examples:
+ - "2020-10-15T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ subdomain:
+ type: "string"
+ description: "The subdomain for your Zendesk Support"
+ auth_method:
+ title: "ZenDesk Authorization Method"
+ type: "object"
+ default: "api_token"
+ description: "Zendesk service provides 2 auth method: API token and oAuth2.\
+ \ Now only the first one is available. Another one will be added in the\
+ \ future"
+ oneOf:
+ - title: "API Token"
+ type: "object"
+ required:
+ - "email"
+ - "api_token"
+ additionalProperties: false
+ properties:
+ auth_method:
+ type: "string"
+ const: "api_token"
+ email:
+ type: "string"
+ description: "The user email for your Zendesk account"
+ api_token:
+ type: "string"
+ description: "The value of the API token generated. See the docs\
+ \ for more information"
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-zendesk-talk:0.1.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/zendesk-talk"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Zendesk Talk Spec"
+ type: "object"
+ required:
+ - "start_date"
+ - "subdomain"
+ - "access_token"
+ - "email"
+ additionalProperties: false
+ properties:
+ start_date:
+ type: "string"
+ description: "The date from which you'd like to replicate data for Zendesk\
+ \ Talk API, in the format YYYY-MM-DDT00:00:00Z."
+ examples:
+ - "2021-04-01T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ subdomain:
+ type: "string"
+ description: "The subdomain for your Zendesk Talk"
+ access_token:
+ type: "string"
+ description: "The value of the API token generated. See the docs for more information"
+ airbyte_secret: true
+ email:
+ type: "string"
+ description: "The user email for your Zendesk account"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-sentry:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/sentry"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Sentry Spec"
+ type: "object"
+ required:
+ - "auth_token"
+ - "organization"
+ - "project"
+ additionalProperties: false
+ properties:
+ auth_token:
+ type: "string"
+ title: "Authentication tokens"
+ description: "Log into Sentry and then create authentication tokens.For self-hosted, you can find or create\
+ \ authentication tokens by visiting \"{instance_url_prefix}/settings/account/api/auth-tokens/\""
+ airbyte_secret: true
+ hostname:
+ type: "string"
+ title: "Host Name"
+ description: "Host name of Sentry API server.For self-hosted, specify your\
+ \ host name here. Otherwise, leave it empty."
+ default: "sentry.io"
+ organization:
+ type: "string"
+ title: "Organization"
+ description: "The slug of the organization the groups belong to."
+ project:
+ type: "string"
+ title: "Project"
+ description: "The slug of the project the groups belong to."
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-zoom-singer:0.2.4"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/zoom"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Source Zoom Singer Spec"
+ type: "object"
+ required:
+ - "jwt"
+ additionalProperties: false
+ properties:
+ jwt:
+ title: "JWT Token"
+ type: "string"
+ description: "Zoom JWT Token. See the docs for more information on how to obtain this key."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-zuora:0.1.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/zuora"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Zuora Connector Configuration"
+ type: "object"
+ required:
+ - "start_date"
+ - "tenant_endpoint"
+ - "data_query"
+ - "client_id"
+ - "client_secret"
+ properties:
+ start_date:
+ type: "string"
+ title: "Start Date"
+ description: "Start Date in format: YYYY-MM-DD"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ window_in_days:
+ type: "string"
+ title: "Query Window (in days)"
+ description: "The amount of days for each data-chunk begining from start_date.\
+ \ Bigger the value - faster the fetch. (0.1 - as for couple of hours,\
+ \ 1 - as for a Day; 364 - as for a Year)."
+ examples:
+ - "0.5"
+ - "1"
+ - "30"
+ - "60"
+ - "90"
+ - "120"
+ - "200"
+ - "364"
+ pattern: "^(0|[1-9]\\d*)(\\.\\d+)?$"
+ default: "90"
+ tenant_endpoint:
+ title: "Tenant Endpoint Location"
+ type: "string"
+ description: "Please choose the right endpoint where your Tenant is located.\
+ \ More info by this Link"
+ enum:
+ - "US Production"
+ - "US Cloud Production"
+ - "US API Sandbox"
+ - "US Cloud API Sandbox"
+ - "US Central Sandbox"
+ - "US Performance Test"
+ - "EU Production"
+ - "EU API Sandbox"
+ - "EU Central Sandbox"
+ data_query:
+ title: "Data Query Type"
+ type: "string"
+ description: "Choose between `Live`, or `Unlimited` - the optimized, replicated\
+ \ database at 12 hours freshness for high volume extraction Link"
+ enum:
+ - "Live"
+ - "Unlimited"
+ default: "Live"
+ client_id:
+ type: "string"
+ title: "Client ID"
+ description: "Your OAuth user Client ID"
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ title: "Client Secret"
+ description: "Your OAuth user Client Secret"
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
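
Zuora's window_in_days above implies slicing the range from start_date to now into fixed-size chunks, with fractional days allowed. An illustrative sketch of that slicing, not the connector's actual implementation:

import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;

// Sketch only: cuts [start, end) into window_in_days-sized slices.
public class ZuoraWindowExample {

  static List<Instant[]> slice(final Instant start, final Instant end, final double windowInDays) {
    final Duration step = Duration.ofMinutes((long) (windowInDays * 24 * 60));
    final List<Instant[]> slices = new ArrayList<>();
    for (Instant cursor = start; cursor.isBefore(end);) {
      final Instant next = cursor.plus(step).isBefore(end) ? cursor.plus(step) : end;
      slices.add(new Instant[] {cursor, next});
      cursor = next;
    }
    return slices;
  }

  public static void main(final String[] args) {
    // 0.5-day windows over three days yield six slices
    slice(Instant.parse("2021-01-01T00:00:00Z"), Instant.parse("2021-01-04T00:00:00Z"), 0.5)
        .forEach(s -> System.out.println(s[0] + " -> " + s[1]));
  }
}
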
diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/YamlSeedConfigPersistenceTest.java b/airbyte-config/init/src/test/java/io/airbyte/config/init/YamlSeedConfigPersistenceTest.java
similarity index 86%
rename from airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/YamlSeedConfigPersistenceTest.java
rename to airbyte-config/init/src/test/java/io/airbyte/config/init/YamlSeedConfigPersistenceTest.java
index 8a740ba535688..57090570f3e21 100644
--- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/YamlSeedConfigPersistenceTest.java
+++ b/airbyte-config/init/src/test/java/io/airbyte/config/init/YamlSeedConfigPersistenceTest.java
@@ -2,7 +2,7 @@
* Copyright (c) 2021 Airbyte, Inc., all rights reserved.
*/
-package io.airbyte.config.persistence;
+package io.airbyte.config.init;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
@@ -14,7 +14,9 @@
import io.airbyte.config.StandardSourceDefinition;
import io.airbyte.config.StandardSync;
import io.airbyte.config.StandardWorkspace;
+import io.airbyte.config.persistence.ConfigNotFoundException;
import java.io.IOException;
+import java.net.URI;
import java.util.Collections;
import java.util.Map;
import java.util.stream.Stream;
@@ -41,6 +43,7 @@ public void testGetConfig() throws Exception {
assertEquals("airbyte/source-mysql", mysqlSource.getDockerRepository());
assertEquals("https://docs.airbyte.io/integrations/sources/mysql", mysqlSource.getDocumentationUrl());
assertEquals("mysql.svg", mysqlSource.getIcon());
+ assertEquals(URI.create("https://docs.airbyte.io/integrations/sources/mysql"), mysqlSource.getSpec().getDocumentationUrl());
// destination
final String s3DestinationId = "4816b78f-1489-44c1-9060-4b19d5fa9362";
@@ -50,13 +53,16 @@ public void testGetConfig() throws Exception {
assertEquals("S3", s3Destination.getName());
assertEquals("airbyte/destination-s3", s3Destination.getDockerRepository());
assertEquals("https://docs.airbyte.io/integrations/destinations/s3", s3Destination.getDocumentationUrl());
+ assertEquals(URI.create("https://docs.airbyte.io/integrations/destinations/s3"), s3Destination.getSpec().getDocumentationUrl());
}
@Test
public void testGetInvalidConfig() {
- assertThrows(UnsupportedOperationException.class,
+ assertThrows(
+ UnsupportedOperationException.class,
() -> PERSISTENCE.getConfig(ConfigSchema.STANDARD_SYNC, "invalid_id", StandardSync.class));
- assertThrows(ConfigNotFoundException.class,
+ assertThrows(
+ ConfigNotFoundException.class,
() -> PERSISTENCE.getConfig(ConfigSchema.STANDARD_SOURCE_DEFINITION, "invalid_id", StandardWorkspace.class));
}
diff --git a/airbyte-config/models/build.gradle b/airbyte-config/models/build.gradle
index 271b3fe685e7e..d62c88c7e1633 100644
--- a/airbyte-config/models/build.gradle
+++ b/airbyte-config/models/build.gradle
@@ -7,10 +7,11 @@ plugins {
dependencies {
implementation project(':airbyte-json-validation')
implementation project(':airbyte-protocol:models')
+ implementation project(':airbyte-commons')
}
jsonSchema2Pojo {
- sourceType = SourceType.YAMLSCHEMA
+ sourceType = SourceType.YAMLSCHEMA
source = files("${sourceSets.main.output.resourcesDir}/types")
targetDirectory = new File(project.buildDir, 'generated/src/gen/java/')
diff --git a/airbyte-config/models/src/main/java/io/airbyte/config/Configs.java b/airbyte-config/models/src/main/java/io/airbyte/config/Configs.java
index 99b1fc3709159..6d3804ad9b20f 100644
--- a/airbyte-config/models/src/main/java/io/airbyte/config/Configs.java
+++ b/airbyte-config/models/src/main/java/io/airbyte/config/Configs.java
@@ -5,6 +5,7 @@
package io.airbyte.config;
import io.airbyte.commons.version.AirbyteVersion;
+import io.airbyte.config.helpers.LogConfigs;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
@@ -96,6 +97,8 @@ public interface Configs {
String getMemoryLimit();
// Logging
+ LogConfigs getLogConfigs();
+
String getS3LogBucket();
String getS3LogBucketRegion();
diff --git a/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java b/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java
index cb61f7710eb8b..3950fab5d432b 100644
--- a/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java
+++ b/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java
@@ -9,6 +9,8 @@
import com.google.common.base.Strings;
import io.airbyte.commons.version.AirbyteVersion;
import io.airbyte.config.helpers.LogClientSingleton;
+import io.airbyte.config.helpers.LogConfigs;
+import io.airbyte.config.helpers.LogConfiguration;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.HashSet;
@@ -93,6 +95,7 @@ public class EnvConfigs implements Configs {
public static final String DEFAULT_NETWORK = "host";
private final Function<String, String> getEnv;
+ private LogConfiguration logConfiguration;
public EnvConfigs() {
this(System::getenv);
@@ -100,6 +103,14 @@ public EnvConfigs() {
EnvConfigs(final Function<String, String> getEnv) {
this.getEnv = getEnv;
+ this.logConfiguration = new LogConfiguration(
+ getEnvOrDefault(LogClientSingleton.S3_LOG_BUCKET, ""),
+ getEnvOrDefault(LogClientSingleton.S3_LOG_BUCKET_REGION, ""),
+ getEnvOrDefault(LogClientSingleton.AWS_ACCESS_KEY_ID, ""),
+ getEnvOrDefault(LogClientSingleton.AWS_SECRET_ACCESS_KEY, ""),
+ getEnvOrDefault(LogClientSingleton.S3_MINIO_ENDPOINT, ""),
+ getEnvOrDefault(LogClientSingleton.GCP_STORAGE_BUCKET, ""),
+ getEnvOrDefault(LogClientSingleton.GOOGLE_APPLICATION_CREDENTIALS, ""));
}
@Override
@@ -289,9 +300,11 @@ public String getJobImagePullPolicy() {
/**
* Returns worker pod tolerations parsed from its own environment variable. The value of the env is
* a string that represents one or more tolerations.
+ *
* Tolerations are separated by a `;`
* Each toleration contains k=v pairs mentioning some/all of key, effect, operator and value and
* separated by `,`
+ *
*
* For example: the following represents two tolerations, one checking existence and another
* matching a value
@@ -399,37 +412,41 @@ public String getJobsImagePullSecret() {
@Override
public String getS3LogBucket() {
- return getEnvOrDefault(LogClientSingleton.S3_LOG_BUCKET, "");
+ return logConfiguration.getS3LogBucket();
}
@Override
public String getS3LogBucketRegion() {
- return getEnvOrDefault(LogClientSingleton.S3_LOG_BUCKET_REGION, "");
+ return logConfiguration.getS3LogBucketRegion();
}
@Override
public String getAwsAccessKey() {
- return getEnvOrDefault(LogClientSingleton.AWS_ACCESS_KEY_ID, "");
+ return logConfiguration.getAwsAccessKey();
}
@Override
public String getAwsSecretAccessKey() {
- return getEnvOrDefault(LogClientSingleton.AWS_SECRET_ACCESS_KEY, "");
+ return logConfiguration.getAwsSecretAccessKey();
}
@Override
public String getS3MinioEndpoint() {
- return getEnvOrDefault(LogClientSingleton.S3_MINIO_ENDPOINT, "");
+ return logConfiguration.getS3MinioEndpoint();
}
@Override
public String getGcpStorageBucket() {
- return getEnvOrDefault(LogClientSingleton.GCP_STORAGE_BUCKET, "");
+ return logConfiguration.getGcpStorageBucket();
}
@Override
public String getGoogleApplicationCredentials() {
- return getEnvOrDefault(LogClientSingleton.GOOGLE_APPLICATION_CREDENTIALS, "");
+ return logConfiguration.getGoogleApplicationCredentials();
+ }
+
+ public LogConfigs getLogConfigs() {
+ return logConfiguration;
}
@Override
@@ -443,7 +460,7 @@ public SecretPersistenceType getSecretPersistenceType() {
return SecretPersistenceType.valueOf(secretPersistenceStr);
}
- private String getEnvOrDefault(final String key, final String defaultValue) {
+ protected String getEnvOrDefault(final String key, final String defaultValue) {
return getEnvOrDefault(key, defaultValue, Function.identity(), false);
}
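
With this change, EnvConfigs resolves every log setting once, at construction time, into the immutable LogConfiguration it hands out via getLogConfigs(). A sketch of driving the package-private EnvConfigs(getEnv) constructor shown above with a fake environment (so it must live in the io.airbyte.config package; the bucket values are invented):

package io.airbyte.config;

import java.util.Map;

// Sketch only: exercises the package-private EnvConfigs constructor with fake env vars.
public class EnvConfigsWiringExample {

  public static void main(final String[] args) {
    final Map<String, String> fakeEnv = Map.of(
        "S3_LOG_BUCKET", "my-log-bucket",
        "S3_LOG_BUCKET_REGION", "us-west-2");
    final EnvConfigs configs = new EnvConfigs(fakeEnv::get);
    // All log settings were captured once, at construction, in the immutable
    // LogConfiguration returned here.
    System.out.println(configs.getLogConfigs().getS3LogBucket()); // my-log-bucket
  }
}
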
diff --git a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/GcsLogs.java b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/GcsLogs.java
index e248982892d28..5a0787366d46a 100644
--- a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/GcsLogs.java
+++ b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/GcsLogs.java
@@ -135,7 +135,7 @@ public static void main(final String[] args) throws IOException {
blob.downloadTo(os);
}
os.close();
- final var data = new GcsLogs().tailCloudLog(new LogConfigDelegator(new EnvConfigs()), "tail", 6);
+ final var data = new GcsLogs().tailCloudLog((new EnvConfigs()).getLogConfigs(), "tail", 6);
System.out.println(data);
}
diff --git a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogClientSingleton.java b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogClientSingleton.java
index 11924e72e31a0..c393017121d39 100644
--- a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogClientSingleton.java
+++ b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogClientSingleton.java
@@ -8,7 +8,6 @@
import io.airbyte.commons.io.IOs;
import io.airbyte.config.Configs;
import io.airbyte.config.Configs.WorkerEnvironment;
-import io.airbyte.config.EnvConfigs;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
@@ -19,11 +18,10 @@
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
-// todo (cgardens) - make this an actual singleton so we can write tests and mock the components.
/**
* Airbyte's logging layer entrypoint. Handles logs written to local disk as well as logs written to
* cloud storages.
- *
+ *
* Although the configuration is passed in as {@link Configs}, it is transformed to
* {@link LogConfigs} within this class. Beyond this class, all configuration consumption is via the
* {@link LogConfigs} interface via the {@link CloudLogs} interface.
@@ -31,51 +29,56 @@
public class LogClientSingleton {
private static final Logger LOGGER = LoggerFactory.getLogger(LogClientSingleton.class);
+ private static LogClientSingleton instance;
@VisibleForTesting
- static final int LOG_TAIL_SIZE = 1000000;
+ final static int LOG_TAIL_SIZE = 1000000;
@VisibleForTesting
- static CloudLogs logClient;
+ CloudLogs logClient;
// Any changes to the following values must also be propagated to the log4j2.xml in main/resources.
- public static String WORKSPACE_MDC_KEY = "workspace_app_root";
- public static String CLOUD_WORKSPACE_MDC_KEY = "cloud_workspace_app_root";
+ public static final String WORKSPACE_MDC_KEY = "workspace_app_root";
+ public static final String CLOUD_WORKSPACE_MDC_KEY = "cloud_workspace_app_root";
- public static String JOB_LOG_PATH_MDC_KEY = "job_log_path";
- public static String CLOUD_JOB_LOG_PATH_MDC_KEY = "cloud_job_log_path";
+ public static final String JOB_LOG_PATH_MDC_KEY = "job_log_path";
+ public static final String CLOUD_JOB_LOG_PATH_MDC_KEY = "cloud_job_log_path";
// S3/Minio
- public static String S3_LOG_BUCKET = "S3_LOG_BUCKET";
- public static String S3_LOG_BUCKET_REGION = "S3_LOG_BUCKET_REGION";
- public static String AWS_ACCESS_KEY_ID = "AWS_ACCESS_KEY_ID";
- public static String AWS_SECRET_ACCESS_KEY = "AWS_SECRET_ACCESS_KEY";
- public static String S3_MINIO_ENDPOINT = "S3_MINIO_ENDPOINT";
+ public static final String S3_LOG_BUCKET = "S3_LOG_BUCKET";
+ public static final String S3_LOG_BUCKET_REGION = "S3_LOG_BUCKET_REGION";
+ public static final String AWS_ACCESS_KEY_ID = "AWS_ACCESS_KEY_ID";
+ public static final String AWS_SECRET_ACCESS_KEY = "AWS_SECRET_ACCESS_KEY";
+ public static final String S3_MINIO_ENDPOINT = "S3_MINIO_ENDPOINT";
// GCS
- public static String GCP_STORAGE_BUCKET = "GCP_STORAGE_BUCKET";
- public static String GOOGLE_APPLICATION_CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS";
+ public static final String GCP_STORAGE_BUCKET = "GCP_STORAGE_BUCKET";
+ public static final String GOOGLE_APPLICATION_CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS";
- public static int DEFAULT_PAGE_SIZE = 1000;
- public static String LOG_FILENAME = "logs.log";
- public static String APP_LOGGING_CLOUD_PREFIX = "app-logging";
- public static String JOB_LOGGING_CLOUD_PREFIX = "job-logging";
+ public static final int DEFAULT_PAGE_SIZE = 1000;
+ public static final String LOG_FILENAME = "logs.log";
+ public static final String APP_LOGGING_CLOUD_PREFIX = "app-logging";
+ public static final String JOB_LOGGING_CLOUD_PREFIX = "job-logging";
- public static Path getServerLogsRoot(final Configs configs) {
- return configs.getWorkspaceRoot().resolve("server/logs");
+ public static synchronized LogClientSingleton getInstance() {
+ if (instance == null) {
+ instance = new LogClientSingleton();
+ }
+ return instance;
}
- public static Path getSchedulerLogsRoot(final Configs configs) {
- return configs.getWorkspaceRoot().resolve("scheduler/logs");
+ public Path getServerLogsRoot(final Path workspaceRoot) {
+ return workspaceRoot.resolve("server/logs");
}
- public static File getServerLogFile(final Configs configs) {
- final var logPathBase = getServerLogsRoot(configs);
- if (shouldUseLocalLogs(configs.getWorkerEnvironment())) {
- return logPathBase.resolve(LOG_FILENAME).toFile();
- }
+ public Path getSchedulerLogsRoot(final Path workspaceRoot) {
+ return workspaceRoot.resolve("scheduler/logs");
+ }
- final var logConfigs = new LogConfigDelegator(configs);
- final var cloudLogPath = APP_LOGGING_CLOUD_PREFIX + logPathBase;
+ public File getServerLogFile(final Path workspaceRoot, final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs) {
+ if (shouldUseLocalLogs(workerEnvironment)) {
+ return getServerLogsRoot(workspaceRoot).resolve(LOG_FILENAME).toFile();
+ }
+ final var cloudLogPath = APP_LOGGING_CLOUD_PREFIX + getServerLogsRoot(workspaceRoot);
try {
return logClient.downloadCloudLog(logConfigs, cloudLogPath);
} catch (final IOException e) {
@@ -83,14 +86,12 @@ public static File getServerLogFile(final Configs configs) {
}
}
- public static File getSchedulerLogFile(final Configs configs) {
- final var logPathBase = getSchedulerLogsRoot(configs);
- if (shouldUseLocalLogs(configs.getWorkerEnvironment())) {
- return logPathBase.resolve(LOG_FILENAME).toFile();
+ public File getSchedulerLogFile(final Path workspaceRoot, final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs) {
+ if (shouldUseLocalLogs(workerEnvironment)) {
+ return getSchedulerLogsRoot(workspaceRoot).resolve(LOG_FILENAME).toFile();
}
- final var logConfigs = new LogConfigDelegator(configs);
- final var cloudLogPath = APP_LOGGING_CLOUD_PREFIX + logPathBase;
+ final var cloudLogPath = APP_LOGGING_CLOUD_PREFIX + getSchedulerLogsRoot(workspaceRoot);
try {
return logClient.downloadCloudLog(logConfigs, cloudLogPath);
} catch (final IOException e) {
@@ -98,16 +99,15 @@ public static File getSchedulerLogFile(final Configs configs) {
}
}
- public static List<String> getJobLogFile(final Configs configs, final Path logPath) throws IOException {
+ public List<String> getJobLogFile(final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs, final Path logPath) throws IOException {
if (logPath == null || logPath.equals(Path.of(""))) {
return Collections.emptyList();
}
- if (shouldUseLocalLogs(configs.getWorkerEnvironment())) {
+ if (shouldUseLocalLogs(workerEnvironment)) {
return IOs.getTail(LOG_TAIL_SIZE, logPath);
}
- final var logConfigs = new LogConfigDelegator(configs);
final var cloudLogPath = JOB_LOGGING_CLOUD_PREFIX + logPath;
return logClient.tailCloudLog(logConfigs, cloudLogPath, LOG_TAIL_SIZE);
}
@@ -116,52 +116,47 @@ public static List getJobLogFile(final Configs configs, final Path logPa
* Primarily to clean up logs after testing. Only valid for Kube logs.
*/
@VisibleForTesting
- public static void deleteLogs(final Configs configs, final String logPath) {
+ public void deleteLogs(final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs, final String logPath) {
if (logPath == null || logPath.equals(Path.of(""))) {
return;
}
- if (shouldUseLocalLogs(configs.getWorkerEnvironment())) {
+ if (shouldUseLocalLogs(workerEnvironment)) {
throw new NotImplementedException("Local log deletes not supported.");
}
- final var logConfigs = new LogConfigDelegator(configs);
final var cloudLogPath = JOB_LOGGING_CLOUD_PREFIX + logPath;
logClient.deleteLogs(logConfigs, cloudLogPath);
}
- public static void setJobMdc(final Path path) {
- // setJobMdc is referenced from TemporalAttemptExecution without input parameters, so hard to pass
- // this in.
- final Configs configs = new EnvConfigs();
- if (shouldUseLocalLogs(configs.getWorkerEnvironment())) {
+ public void setJobMdc(final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs, final Path path) {
+ if (shouldUseLocalLogs(workerEnvironment)) {
LOGGER.debug("Setting docker job mdc");
MDC.put(LogClientSingleton.JOB_LOG_PATH_MDC_KEY, path.resolve(LogClientSingleton.LOG_FILENAME).toString());
} else {
LOGGER.debug("Setting kube job mdc");
- final var logConfigs = new LogConfigDelegator(configs);
createCloudClientIfNull(logConfigs);
MDC.put(LogClientSingleton.CLOUD_JOB_LOG_PATH_MDC_KEY, path.resolve(LogClientSingleton.LOG_FILENAME).toString());
}
}
- public static void setWorkspaceMdc(final Path path) {
- final var configs = new EnvConfigs();
- if (shouldUseLocalLogs(configs.getWorkerEnvironment())) {
+ public void setWorkspaceMdc(final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs, final Path path) {
+ if (shouldUseLocalLogs(workerEnvironment)) {
LOGGER.debug("Setting docker workspace mdc");
MDC.put(LogClientSingleton.WORKSPACE_MDC_KEY, path.toString());
} else {
LOGGER.debug("Setting kube workspace mdc");
- final var logConfigs = new LogConfigDelegator(configs);
createCloudClientIfNull(logConfigs);
MDC.put(LogClientSingleton.CLOUD_WORKSPACE_MDC_KEY, path.toString());
}
}
+ // This method should cease to exist here and become a property on the enum instead
+ // TODO handle this as part of refactor https://github.com/airbytehq/airbyte/issues/7545
private static boolean shouldUseLocalLogs(final WorkerEnvironment workerEnvironment) {
return workerEnvironment.equals(WorkerEnvironment.DOCKER);
}
- private static void createCloudClientIfNull(final LogConfigs configs) {
+ private void createCloudClientIfNull(final LogConfigs configs) {
if (logClient == null) {
logClient = CloudLogs.createCloudLogClient(configs);
}
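
Call sites now obtain an instance via getInstance() and pass the worker environment and log configuration in explicitly, instead of the logging layer reading the env itself. The resulting pattern, mirroring the updated tests later in this diff:

import io.airbyte.config.EnvConfigs;
import io.airbyte.config.helpers.LogClientSingleton;
import java.nio.file.Path;

// The new instance-based call pattern introduced by this refactor.
public class LogClientUsageExample {

  public static void main(final String[] args) {
    final EnvConfigs configs = new EnvConfigs();
    // Environment and log configuration are dependencies, not globals.
    LogClientSingleton.getInstance().setWorkspaceMdc(
        configs.getWorkerEnvironment(),
        configs.getLogConfigs(),
        Path.of("/workspace"));
  }
}
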
diff --git a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogConfigDelegator.java b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogConfigDelegator.java
deleted file mode 100644
index 18f194d283d18..0000000000000
--- a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogConfigDelegator.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.config.helpers;
-
-import io.airbyte.config.Configs;
-
-/**
- * Implements {@link LogConfigs} by delegating to a {@link Configs} implementation. Because the
- * logging configuration overlaps with other configuration, this delegation is intended to avoid
- * multiple configurations existing at once.
- */
-public class LogConfigDelegator implements LogConfigs {
-
- private final Configs delegate;
-
- public LogConfigDelegator(final Configs configs) {
- delegate = configs;
- }
-
- @Override
- public String getS3LogBucket() {
- return delegate.getS3LogBucket();
- }
-
- @Override
- public String getS3LogBucketRegion() {
- return delegate.getS3LogBucketRegion();
- }
-
- @Override
- public String getAwsAccessKey() {
- return delegate.getAwsAccessKey();
- }
-
- @Override
- public String getAwsSecretAccessKey() {
- return delegate.getAwsSecretAccessKey();
- }
-
- @Override
- public String getS3MinioEndpoint() {
- return delegate.getS3MinioEndpoint();
- }
-
- @Override
- public String getGcpStorageBucket() {
- return delegate.getGcpStorageBucket();
- }
-
- @Override
- public String getGoogleApplicationCredentials() {
- return delegate.getGoogleApplicationCredentials();
- }
-
-}
diff --git a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogConfigs.java b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogConfigs.java
index 5acaadd83bace..345e21a5cd045 100644
--- a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogConfigs.java
+++ b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogConfigs.java
@@ -6,8 +6,8 @@
/**
* Configuration required to retrieve logs. This is a subset of the methods defined in
- * {@link io.airbyte.config.Configs} so actual look up can be delegated in
- * {@link LogConfigDelegator}. This prevents conflicting configuration existing at once.
+ * {@link io.airbyte.config.Configs} so actual look up can be delegated in {@link LogConfiguration}.
+ * This prevents conflicting configuration existing at once.
*/
public interface LogConfigs {
diff --git a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogConfiguration.java b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogConfiguration.java
new file mode 100644
index 0000000000000..85903e2c4100c
--- /dev/null
+++ b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogConfiguration.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.config.helpers;
+
+/**
+ * Implements {@link LogConfigs} with immutable values. Because the logging configuration overlaps
+ * with other configuration, this dedicated holder is intended to avoid multiple configurations
+ * existing at once.
+ */
+public class LogConfiguration implements LogConfigs {
+
+ public final static LogConfiguration EMPTY = new LogConfiguration("", "", "", "", "", "", "");
+
+ private final String s3LogBucket;
+ private final String s3LogBucketRegion;
+ private final String awsAccessKey;
+ private final String awsSecretAccessKey;
+ private final String s3MinioEndpoint;
+ private final String gcpStorageBucket;
+ private final String googleApplicationCredentials;
+
+ public LogConfiguration(final String s3LogBucket,
+ final String s3LogBucketRegion,
+ final String awsAccessKey,
+ final String awsSecretAccessKey,
+ final String s3MinioEndpoint,
+ final String gcpStorageBucket,
+ final String googleApplicationCredentials) {
+ this.s3LogBucket = s3LogBucket;
+ this.s3LogBucketRegion = s3LogBucketRegion;
+ this.awsAccessKey = awsAccessKey;
+ this.awsSecretAccessKey = awsSecretAccessKey;
+ this.s3MinioEndpoint = s3MinioEndpoint;
+ this.gcpStorageBucket = gcpStorageBucket;
+ this.googleApplicationCredentials = googleApplicationCredentials;
+ }
+
+ @Override
+ public String getS3LogBucket() {
+ return s3LogBucket;
+ }
+
+ @Override
+ public String getS3LogBucketRegion() {
+ return s3LogBucketRegion;
+ }
+
+ @Override
+ public String getAwsAccessKey() {
+ return awsAccessKey;
+ }
+
+ @Override
+ public String getAwsSecretAccessKey() {
+ return awsSecretAccessKey;
+ }
+
+ @Override
+ public String getS3MinioEndpoint() {
+ return s3MinioEndpoint;
+ }
+
+ @Override
+ public String getGcpStorageBucket() {
+ return gcpStorageBucket;
+ }
+
+ @Override
+ public String getGoogleApplicationCredentials() {
+ return googleApplicationCredentials;
+ }
+
+}
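
A minimal sketch of constructing the new immutable holder; the values are placeholders, and EMPTY stands in when no cloud logging is configured:

import io.airbyte.config.helpers.LogConfiguration;

// Sketch only: a GCS-only LogConfiguration with placeholder values.
public class LogConfigurationExample {

  public static void main(final String[] args) {
    final LogConfiguration gcsOnly = new LogConfiguration(
        "", "", "", "", "",           // no S3/Minio settings
        "my-gcs-log-bucket",          // GCP_STORAGE_BUCKET
        "/secrets/gcp-creds.json");   // GOOGLE_APPLICATION_CREDENTIALS
    System.out.println(gcsOnly.getGcpStorageBucket());
    // LogConfiguration.EMPTY is the no-cloud-logging sentinel.
    System.out.println(LogConfiguration.EMPTY.getS3LogBucket().isEmpty()); // true
  }
}
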
diff --git a/airbyte-config/models/src/main/resources/types/DockerImageSpec.yaml b/airbyte-config/models/src/main/resources/types/DockerImageSpec.yaml
new file mode 100644
index 0000000000000..0d3becf8e74c6
--- /dev/null
+++ b/airbyte-config/models/src/main/resources/types/DockerImageSpec.yaml
@@ -0,0 +1,16 @@
+---
+"$schema": http://json-schema.org/draft-07/schema#
+"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/DockerImageSpec.yaml
+title: DockerImageSpec
+description: docker image name and the connector specification associated with it
+type: object
+required:
+ - dockerImage
+ - spec
+additionalProperties: false
+properties:
+ dockerImage:
+ type: string
+ spec:
+ type: object
+ existingJavaType: io.airbyte.protocol.models.ConnectorSpecification
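
jsonSchema2Pojo turns this schema into a model class whose spec field maps onto the existing ConnectorSpecification type. A sketch of reading one seed entry with the Yamls helper from airbyte-commons, assuming the generated class is io.airbyte.config.DockerImageSpec with accessors matching the schema's field names:

import io.airbyte.commons.yaml.Yamls;
import io.airbyte.config.DockerImageSpec;

// Assumes jsonSchema2Pojo generated io.airbyte.config.DockerImageSpec from the
// schema above, as with the other airbyte-config models.
public class DockerImageSpecExample {

  public static void main(final String[] args) {
    final String entry = "dockerImage: \"airbyte/source-sentry:0.1.0\"\n"
        + "spec:\n"
        + "  documentationUrl: \"https://docs.airbyte.io/integrations/sources/sentry\"\n";
    final DockerImageSpec parsed = Yamls.deserialize(entry, DockerImageSpec.class);
    System.out.println(parsed.getDockerImage());
  }
}
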
diff --git a/airbyte-config/models/src/test/java/io/airbyte/config/helpers/GcsLogsTest.java b/airbyte-config/models/src/test/java/io/airbyte/config/helpers/GcsLogsTest.java
index f353c166716c0..b59774795442f 100644
--- a/airbyte-config/models/src/test/java/io/airbyte/config/helpers/GcsLogsTest.java
+++ b/airbyte-config/models/src/test/java/io/airbyte/config/helpers/GcsLogsTest.java
@@ -10,6 +10,7 @@
import static org.mockito.Mockito.when;
import io.airbyte.config.EnvConfigs;
+import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
@@ -36,8 +37,7 @@ public void testMissingConfiguration() {
*/
@Test
public void testRetrieveAllLogs() throws IOException {
- final var configs = new LogConfigDelegator(new EnvConfigs());
- final var data = GcsLogs.getFile(configs, "paginate", 6);
+ final File data = GcsLogs.getFile((new EnvConfigs()).getLogConfigs(), "paginate", 6);
final var retrieved = new ArrayList<String>();
Files.lines(data.toPath()).forEach(retrieved::add);
@@ -56,8 +56,7 @@ public void testRetrieveAllLogs() throws IOException {
*/
@Test
public void testTail() throws IOException {
- final var configs = new LogConfigDelegator(new EnvConfigs());
- final var data = new GcsLogs().tailCloudLog(configs, "tail", 6);
+ final var data = new GcsLogs().tailCloudLog((new EnvConfigs()).getLogConfigs(), "tail", 6);
final var expected = List.of("Line 4", "Line 5", "Line 6", "Line 7", "Line 8", "Line 9");
assertEquals(data, expected);
diff --git a/airbyte-config/models/src/test/java/io/airbyte/config/helpers/KubeLoggingConfigTest.java b/airbyte-config/models/src/test/java/io/airbyte/config/helpers/KubeLoggingConfigTest.java
index 8d2477852e33b..726d235dcf40e 100644
--- a/airbyte-config/models/src/test/java/io/airbyte/config/helpers/KubeLoggingConfigTest.java
+++ b/airbyte-config/models/src/test/java/io/airbyte/config/helpers/KubeLoggingConfigTest.java
@@ -31,7 +31,8 @@ public class KubeLoggingConfigTest {
public void cleanUpLogs() {
if (logPath != null) {
try {
- LogClientSingleton.deleteLogs(new EnvConfigs(), logPath);
+ final EnvConfigs envConfigs = new EnvConfigs();
+ LogClientSingleton.getInstance().deleteLogs(envConfigs.getWorkerEnvironment(), envConfigs.getLogConfigs(), logPath);
} catch (final Exception e) {
// Ignore Minio delete error.
}
@@ -47,9 +48,10 @@ public void cleanUpLogs() {
*/
@Test
public void testLoggingConfiguration() throws IOException, InterruptedException {
+ final EnvConfigs envConfigs = new EnvConfigs();
final var randPath = Strings.addRandomSuffix("-", "", 5);
// This mirrors our Log4j2 set up. See log4j2.xml.
- LogClientSingleton.setJobMdc(Path.of(randPath));
+ LogClientSingleton.getInstance().setJobMdc(envConfigs.getWorkerEnvironment(), envConfigs.getLogConfigs(), Path.of(randPath));
final var toLog = List.of("line 1", "line 2", "line 3");
for (final String l : toLog) {
@@ -64,7 +66,7 @@ public void testLoggingConfiguration() throws IOException, InterruptedException
logPath = randPath + "/logs.log/";
// The same env vars that log4j2 uses to determine where to publish to determine how to retrieve the
// log file.
- final var logs = LogClientSingleton.getJobLogFile(new EnvConfigs(), Path.of(logPath));
+ final var logs = LogClientSingleton.getInstance().getJobLogFile(envConfigs.getWorkerEnvironment(), envConfigs.getLogConfigs(), Path.of(logPath));
// Each log line is of the form <timestamp> <log message>. Further, there might be
// other log lines from the system running. Join all the lines to simplify assertions.
final var logsLine = Strings.join(logs, " ");
diff --git a/airbyte-config/models/src/test/java/io/airbyte/config/helpers/LogClientSingletonTest.java b/airbyte-config/models/src/test/java/io/airbyte/config/helpers/LogClientSingletonTest.java
index c1d60e8955e72..2d5de01af56b5 100644
--- a/airbyte-config/models/src/test/java/io/airbyte/config/helpers/LogClientSingletonTest.java
+++ b/airbyte-config/models/src/test/java/io/airbyte/config/helpers/LogClientSingletonTest.java
@@ -29,25 +29,28 @@ class LogClientSingletonTest {
void setup() {
configs = mock(Configs.class);
mockLogClient = mock(CloudLogs.class);
- LogClientSingleton.logClient = mockLogClient;
+ LogClientSingleton.getInstance().logClient = mockLogClient;
}
@Test
void testGetJobLogFileK8s() throws IOException {
when(configs.getWorkerEnvironment()).thenReturn(WorkerEnvironment.KUBERNETES);
- assertEquals(Collections.emptyList(), LogClientSingleton.getJobLogFile(configs, Path.of("/job/1")));
+ assertEquals(Collections.emptyList(),
+ LogClientSingleton.getInstance().getJobLogFile(configs.getWorkerEnvironment(), configs.getLogConfigs(), Path.of("/job/1")));
verify(mockLogClient).tailCloudLog(any(), eq("job-logging/job/1"), eq(LogClientSingleton.LOG_TAIL_SIZE));
}
@Test
void testGetJobLogFileNullPath() throws IOException {
- assertEquals(Collections.emptyList(), LogClientSingleton.getJobLogFile(configs, null));
+ assertEquals(Collections.emptyList(),
+ LogClientSingleton.getInstance().getJobLogFile(configs.getWorkerEnvironment(), configs.getLogConfigs(), null));
verifyNoInteractions(mockLogClient);
}
@Test
void testGetJobLogFileEmptyPath() throws IOException {
- assertEquals(Collections.emptyList(), LogClientSingleton.getJobLogFile(configs, Path.of("")));
+ assertEquals(Collections.emptyList(),
+ LogClientSingleton.getInstance().getJobLogFile(configs.getWorkerEnvironment(), configs.getLogConfigs(), Path.of("")));
verifyNoInteractions(mockLogClient);
}
diff --git a/airbyte-config/models/src/test/java/io/airbyte/config/helpers/S3LogsTest.java b/airbyte-config/models/src/test/java/io/airbyte/config/helpers/S3LogsTest.java
index 74959a55be01f..07d47af19fdba 100644
--- a/airbyte-config/models/src/test/java/io/airbyte/config/helpers/S3LogsTest.java
+++ b/airbyte-config/models/src/test/java/io/airbyte/config/helpers/S3LogsTest.java
@@ -25,6 +25,8 @@
@Tag("logger-client")
public class S3LogsTest {
+ private static final LogConfigs logConfigs = (new EnvConfigs()).getLogConfigs();
+
@Test
public void testMissingCredentials() {
final var configs = mock(LogConfigs.class);
@@ -41,8 +43,7 @@ public void testMissingCredentials() {
*/
@Test
public void testRetrieveAllLogs() throws IOException {
- final var configs = new LogConfigDelegator(new EnvConfigs());
- final var data = S3Logs.getFile(configs, "paginate", 6);
+ final var data = S3Logs.getFile(logConfigs, "paginate", 6);
final var retrieved = new ArrayList<String>();
Files.lines(data.toPath()).forEach(retrieved::add);
@@ -61,9 +62,7 @@ public void testRetrieveAllLogs() throws IOException {
*/
@Test
public void testTail() throws IOException {
- final var configs = new LogConfigDelegator(new EnvConfigs());
- final var data = new S3Logs().tailCloudLog(configs, "tail", 6);
-
+ final var data = new S3Logs().tailCloudLog(logConfigs, "tail", 6);
final var expected = List.of("Line 4", "Line 5", "Line 6", "Line 7", "Line 8", "Line 9");
assertEquals(data, expected);
}
diff --git a/airbyte-config/persistence/build.gradle b/airbyte-config/persistence/build.gradle
index 834f38596c98d..6b072911359db 100644
--- a/airbyte-config/persistence/build.gradle
+++ b/airbyte-config/persistence/build.gradle
@@ -11,7 +11,6 @@ dependencies {
implementation project(':airbyte-db:jooq')
implementation project(':airbyte-protocol:models')
implementation project(':airbyte-config:models')
- implementation project(':airbyte-config:init')
implementation project(':airbyte-json-validation')
implementation 'com.google.cloud:google-cloud-secretmanager:1.7.2'
testImplementation "org.testcontainers:postgresql:1.15.3"
diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java
index d7d47242df735..40c5edb5286b4 100644
--- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java
+++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java
@@ -461,7 +461,7 @@ public void updateConnectionState(final UUID connectionId, final State state) th
/**
* Converts between a dumpConfig() output and a replaceAllConfigs() input, by deserializing the
- * string/jsonnode into the AirbyteConfig, Stream