diff --git a/.github/actions/run-dagger-pipeline/action.yml b/.github/actions/run-dagger-pipeline/action.yml
index 8d28ea9e788cd..5ba6faa69a479 100644
--- a/.github/actions/run-dagger-pipeline/action.yml
+++ b/.github/actions/run-dagger-pipeline/action.yml
@@ -83,6 +83,9 @@ inputs:
description: "URL to airbyte-ci binary"
required: false
default: https://connectors.airbyte.com/airbyte-ci/releases/ubuntu/latest/airbyte-ci
+ python_registry_token:
+ description: "Python registry API token to publish python package"
+ required: false
runs:
using: "composite"
@@ -182,3 +185,4 @@ runs:
CI: "True"
TAILSCALE_AUTH_KEY: ${{ inputs.tailscale_auth_key }}
DOCKER_REGISTRY_MIRROR_URL: ${{ inputs.docker_registry_mirror_url }}
+ PYTHON_REGISTRY_TOKEN: ${{ inputs.python_registry_token }}
diff --git a/.github/workflows/publish_connectors.yml b/.github/workflows/publish_connectors.yml
index f0d10033f4a2d..ae431454eda8b 100644
--- a/.github/workflows/publish_connectors.yml
+++ b/.github/workflows/publish_connectors.yml
@@ -63,6 +63,7 @@ jobs:
s3_build_cache_secret_key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }}
tailscale_auth_key: ${{ secrets.TAILSCALE_AUTH_KEY }}
subcommand: "connectors --concurrency=1 --execute-timeout=3600 --metadata-changes-only publish --main-release"
+ python_registry_token: ${{ secrets.PYPI_TOKEN }}
- name: Publish connectors [manual]
id: publish-connectors
@@ -84,6 +85,7 @@ jobs:
s3_build_cache_secret_key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }}
tailscale_auth_key: ${{ secrets.TAILSCALE_AUTH_KEY }}
subcommand: "connectors ${{ github.event.inputs.connectors-options }} publish ${{ github.event.inputs.publish-options }}"
+ python_registry_token: ${{ secrets.PYPI_TOKEN }}
set-instatus-incident-on-failure:
name: Create Instatus Incident on Failure
diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md
index e762b5705d248..fb5d36815d70f 100644
--- a/airbyte-ci/connectors/pipelines/README.md
+++ b/airbyte-ci/connectors/pipelines/README.md
@@ -491,6 +491,37 @@ This command runs formatting checks and reformats any code that would be reforma
Running `airbyte-ci format fix all` will format all of the different types of code. Run `airbyte-ci format fix --help` for subcommands to format only certain types of files.
+### `poetry` command subgroup
+
+Available commands:
+
+- `airbyte-ci poetry publish`
+
+#### Options
+
+| Option | Required | Default | Mapped environment variable | Description |
+| ------------------- | -------- | ------- | --------------------------- | ------------------------------------------------------------------------------------------- |
+| `--package-path` | True | | | The path to the python package to execute a poetry command on. |
+
+#### Examples
+
+- Publish a python package: `airbyte-ci poetry --package-path=path/to/package publish --publish-name=my-package --publish-version="1.2.3" --python-registry-token="..." --registry-url="http://host.docker.internal:8012/"`
+
+### `publish` command
+
+This command publishes poetry packages (using `pyproject.toml`) or python packages (using `setup.py`) to a python registry.
+
+For poetry packages, the package name and version can be taken from the `pyproject.toml` file or be specified as options.
+
+#### Options
+
+| Option | Required | Default | Mapped environment variable | Description |
+| ------------------------- | -------- | ----------------------- | --------------------------- | --------------------------------------------------------------------------------------------------------------- |
+| `--publish-name` | False | | | The name of the package. Not required for poetry packages that define it in the `pyproject.toml` file |
+| `--publish-version` | False | | | The version of the package. Not required for poetry packages that define it in the `pyproject.toml` file |
+| `--python-registry-token` | True | | PYTHON_REGISTRY_TOKEN | The API token to authenticate with the registry. For pypi, the `pypi-` prefix needs to be specified |
+| `--registry-url` | False | https://pypi.org/simple | | The python registry to publish to. Defaults to main pypi |
+
### `metadata` command subgroup
Available commands:
@@ -547,7 +578,8 @@ E.G.: running `pytest` on a specific test folder:
| Version | PR | Description |
| ------- | ---------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------- |
-| 3.5.3 | [#34339](https://github.com/airbytehq/airbyte/pull/34339) | only do minimal changes on a connector version_bump |
+| 3.6.0 | [#34111](https://github.com/airbytehq/airbyte/pull/34111) | Add python registry publishing |
+| 3.5.3 | [#34339](https://github.com/airbytehq/airbyte/pull/34339) | only do minimal changes on a connector version_bump |
| 3.5.2 | [#34381](https://github.com/airbytehq/airbyte/pull/34381) | Bind a sidecar docker host for `airbyte-ci test` |
| 3.5.1 | [#34321](https://github.com/airbytehq/airbyte/pull/34321) | Upgrade to Dagger 0.9.6 . |
| 3.5.0 | [#33313](https://github.com/airbytehq/airbyte/pull/33313) | Pass extra params after Gradle tasks. |
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py
index a4471bac7ecaf..829ab07b4e0a0 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py
@@ -89,12 +89,16 @@ def metadata_service_gcs_credentials_secret(self) -> Secret:
def spec_cache_gcs_credentials_secret(self) -> Secret:
return self.dagger_client.set_secret("spec_cache_gcs_credentials", self.spec_cache_gcs_credentials)
+ @property
+ def pre_release_suffix(self) -> str:
+ return self.git_revision[:10]
+
@property
def docker_image_tag(self) -> str:
# get the docker image tag from the parent class
metadata_tag = super().docker_image_tag
if self.pre_release:
- return f"{metadata_tag}-dev.{self.git_revision[:10]}"
+ return f"{metadata_tag}-dev.{self.pre_release_suffix}"
else:
return metadata_tag
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
index ba61a521ec66c..c6e4002829c33 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
@@ -14,8 +14,10 @@
from pipelines.airbyte_ci.connectors.publish.context import PublishConnectorContext
from pipelines.airbyte_ci.connectors.reports import ConnectorReport
from pipelines.airbyte_ci.metadata.pipeline import MetadataUpload, MetadataValidation
+from pipelines.airbyte_ci.steps.python_registry import PublishToPythonRegistry, PythonRegistryPublishContext
from pipelines.dagger.actions.remote_storage import upload_to_gcs
from pipelines.dagger.actions.system import docker
+from pipelines.helpers.pip import is_package_published
from pipelines.models.steps import Step, StepResult, StepStatus
from pydantic import ValidationError
@@ -52,6 +54,28 @@ async def _run(self) -> StepResult:
return StepResult(self, status=StepStatus.SUCCESS, stdout=f"No manifest found for {self.context.docker_image}.")
+class CheckPythonRegistryPackageDoesNotExist(Step):
+ context: PythonRegistryPublishContext
+ title = "Check if the connector is published on python registry"
+
+ async def _run(self) -> StepResult:
+ is_published = is_package_published(
+ self.context.package_metadata.name, self.context.package_metadata.version, self.context.registry
+ )
+ if is_published:
+ return StepResult(
+ self,
+ status=StepStatus.SKIPPED,
+ stderr=f"{self.context.package_metadata.name} already exists in version {self.context.package_metadata.version}.",
+ )
+ else:
+ return StepResult(
+ self,
+ status=StepStatus.SUCCESS,
+ stdout=f"{self.context.package_metadata.name} does not exist in version {self.context.package_metadata.version}.",
+ )
+
+
class PushConnectorImageToRegistry(Step):
context: PublishConnectorContext
title = "Push connector image to registry"
@@ -259,6 +283,11 @@ def create_connector_report(results: List[StepResult]) -> ConnectorReport:
check_connector_image_results = await CheckConnectorImageDoesNotExist(context).run()
results.append(check_connector_image_results)
+ python_registry_steps, terminate_early = await _run_python_registry_publish_pipeline(context)
+ results.extend(python_registry_steps)
+ if terminate_early:
+ return create_connector_report(results)
+
# If the connector image already exists, we don't need to build it, but we still need to upload the metadata file.
# We also need to upload the spec to the spec cache bucket.
if check_connector_image_results.status is StepStatus.SKIPPED:
@@ -312,6 +341,33 @@ def create_connector_report(results: List[StepResult]) -> ConnectorReport:
return connector_report
+async def _run_python_registry_publish_pipeline(context: PublishConnectorContext) -> Tuple[List[StepResult], bool]:
+ """
+ Run the python registry publish pipeline for a single connector.
+ Return the results of the steps and a boolean indicating whether there was an error and the pipeline should be stopped.
+ """
+ results: List[StepResult] = []
+ # Try to convert the context to a PythonRegistryPublishContext. If it returns None, it means we don't need to publish to a python registry.
+ python_registry_context = await PythonRegistryPublishContext.from_publish_connector_context(context)
+ if not python_registry_context:
+ return results, False
+
+ check_python_registry_package_exists_results = await CheckPythonRegistryPackageDoesNotExist(python_registry_context).run()
+ results.append(check_python_registry_package_exists_results)
+ if check_python_registry_package_exists_results.status is StepStatus.SKIPPED:
+ context.logger.info("The connector version is already published on python registry.")
+ elif check_python_registry_package_exists_results.status is StepStatus.SUCCESS:
+ context.logger.info("The connector version is not published on python registry. Let's build and publish it.")
+ publish_to_python_registry_results = await PublishToPythonRegistry(python_registry_context).run()
+ results.append(publish_to_python_registry_results)
+ if publish_to_python_registry_results.status is StepStatus.FAILURE:
+ return results, True
+ elif check_python_registry_package_exists_results.status is StepStatus.FAILURE:
+ return results, True
+
+ return results, False
+
+
def reorder_contexts(contexts: List[PublishConnectorContext]) -> List[PublishConnectorContext]:
"""Reorder contexts so that the ones that are for strict-encrypt/secure connectors come first.
The metadata upload on publish checks if the the connectors referenced in the metadata file are already published to DockerHub.
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/poetry/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/poetry/__init__.py
new file mode 100644
index 0000000000000..c941b30457953
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/poetry/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/poetry/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/poetry/commands.py
new file mode 100644
index 0000000000000..72dbe53b170f8
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/poetry/commands.py
@@ -0,0 +1,34 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+"""
+Module exposing the poetry commands.
+"""
+from __future__ import annotations
+
+import asyncclick as click
+from pipelines.cli.click_decorators import click_ignore_unused_kwargs, click_merge_args_into_context_obj
+from pipelines.cli.lazy_group import LazyGroup
+from pipelines.models.contexts.click_pipeline_context import ClickPipelineContext, pass_pipeline_context
+
+
+@click.group(
+ name="poetry",
+ help="Commands related to running poetry commands.",
+ cls=LazyGroup,
+ lazy_subcommands={
+ "publish": "pipelines.airbyte_ci.poetry.publish.commands.publish",
+ },
+)
+@click.option(
+ "--package-path",
+ help="The path to publish",
+ type=click.STRING,
+ required=True,
+)
+@click_merge_args_into_context_obj
+@pass_pipeline_context
+@click_ignore_unused_kwargs
+async def poetry(pipeline_context: ClickPipelineContext) -> None:
+ pass
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/poetry/publish/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/poetry/publish/__init__.py
new file mode 100644
index 0000000000000..c941b30457953
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/poetry/publish/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/poetry/publish/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/poetry/publish/commands.py
new file mode 100644
index 0000000000000..29785f8312661
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/poetry/publish/commands.py
@@ -0,0 +1,105 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+"""
+Module exposing the publish commands.
+"""
+from __future__ import annotations
+
+from typing import Optional
+
+import asyncclick as click
+from packaging import version
+from pipelines.airbyte_ci.steps.python_registry import PublishToPythonRegistry
+from pipelines.cli.confirm_prompt import confirm
+from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
+from pipelines.consts import DEFAULT_PYTHON_PACKAGE_REGISTRY_URL
+from pipelines.models.contexts.click_pipeline_context import ClickPipelineContext, pass_pipeline_context
+from pipelines.models.contexts.python_registry_publish import PythonRegistryPublishContext
+from pipelines.models.steps import StepStatus
+
+
+async def _has_metadata_yaml(context: PythonRegistryPublishContext) -> bool:
+ dir_to_publish = context.get_repo_dir(context.package_path)
+ return "metadata.yaml" in await dir_to_publish.entries()
+
+
+def _validate_python_version(_ctx: dict, _param: dict, value: Optional[str]) -> Optional[str]:
+ """
+ Check if an given version is valid.
+ """
+ if value is None:
+ return value
+ try:
+ version.Version(value)
+ return value
+ except version.InvalidVersion:
+ raise click.BadParameter(f"Version {value} is not a valid version.")
+
+
+@click.command(cls=DaggerPipelineCommand, name="publish", help="Publish a Python package to a registry.")
+@click.option(
+ "--python-registry-token",
+ help="Access token",
+ type=click.STRING,
+ required=True,
+ envvar="PYTHON_REGISTRY_TOKEN",
+)
+@click.option(
+ "--registry-url",
+ help="Which registry to publish to. If not set, the default pypi is used. For test pypi, use https://test.pypi.org/legacy/",
+ type=click.STRING,
+ default=DEFAULT_PYTHON_PACKAGE_REGISTRY_URL,
+)
+@click.option(
+ "--publish-name",
+ help="The name of the package to publish. If not set, the name will be inferred from the pyproject.toml file of the package.",
+ type=click.STRING,
+)
+@click.option(
+ "--publish-version",
+ help="The version of the package to publish. If not set, the version will be inferred from the pyproject.toml file of the package.",
+ type=click.STRING,
+ callback=_validate_python_version,
+)
+@pass_pipeline_context
+@click.pass_context
+async def publish(
+ ctx: click.Context,
+ click_pipeline_context: ClickPipelineContext,
+ python_registry_token: str,
+ registry_url: str,
+ publish_name: Optional[str],
+ publish_version: Optional[str],
+) -> bool:
+ context = PythonRegistryPublishContext(
+ is_local=ctx.obj["is_local"],
+ git_branch=ctx.obj["git_branch"],
+ git_revision=ctx.obj["git_revision"],
+ ci_report_bucket=ctx.obj["ci_report_bucket_name"],
+ report_output_prefix=ctx.obj["report_output_prefix"],
+ gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"),
+ dagger_logs_url=ctx.obj.get("dagger_logs_url"),
+ pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"),
+ ci_context=ctx.obj.get("ci_context"),
+ ci_gcs_credentials=ctx.obj["ci_gcs_credentials"],
+ python_registry_token=python_registry_token,
+ registry=registry_url,
+ package_path=ctx.obj["package_path"],
+ package_name=publish_name,
+ version=publish_version,
+ )
+
+ dagger_client = await click_pipeline_context.get_dagger_client(pipeline_name=f"Publish {ctx.obj['package_path']} to python registry")
+ context.dagger_client = dagger_client
+
+ if await _has_metadata_yaml(context):
+ confirm(
+ "It looks like you are trying to publish a connector. In most cases, the `connectors` command group should be used instead. Do you want to continue?",
+ abort=True,
+ )
+
+ publish_result = await PublishToPythonRegistry(context).run()
+
+ return publish_result.status is StepStatus.SUCCESS
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/python_registry.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/python_registry.py
new file mode 100644
index 0000000000000..aec2e30bb3da2
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/python_registry.py
@@ -0,0 +1,165 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+import configparser
+import io
+import uuid
+from enum import Enum, auto
+from typing import Dict, Optional
+
+import tomli
+import tomli_w
+from dagger import Container, Directory
+from pipelines.consts import PYPROJECT_TOML_FILE_PATH, SETUP_PY_FILE_PATH
+from pipelines.dagger.actions.python.poetry import with_poetry
+from pipelines.helpers.utils import sh_dash_c
+from pipelines.models.contexts.python_registry_publish import PythonPackageMetadata, PythonRegistryPublishContext
+from pipelines.models.steps import Step, StepResult
+
+
+class PackageType(Enum):
+ POETRY = auto()
+ PIP = auto()
+
+
+class PublishToPythonRegistry(Step):
+ context: PythonRegistryPublishContext
+ title = "Publish package to python registry"
+
+ def _get_base_container(self) -> Container:
+ return with_poetry(self.context)
+
+ async def _get_package_metadata_from_pyproject_toml(self, package_dir_to_publish: Directory) -> Optional[PythonPackageMetadata]:
+ pyproject_toml = package_dir_to_publish.file(PYPROJECT_TOML_FILE_PATH)
+ pyproject_toml_content = await pyproject_toml.contents()
+ contents = tomli.loads(pyproject_toml_content)
+ try:
+ return PythonPackageMetadata(contents["tool"]["poetry"]["name"], contents["tool"]["poetry"]["version"])
+ except KeyError:
+ return None
+
+ async def _get_package_type(self, package_dir_to_publish: Directory) -> Optional[PackageType]:
+ files = await package_dir_to_publish.entries()
+ has_pyproject_toml = PYPROJECT_TOML_FILE_PATH in files
+ has_setup_py = SETUP_PY_FILE_PATH in files
+ if has_pyproject_toml:
+ return PackageType.POETRY
+ elif has_setup_py:
+ return PackageType.PIP
+ else:
+ return None
+
+ async def _run(self) -> StepResult:
+ package_dir_to_publish = await self.context.get_repo_dir(self.context.package_path)
+ package_type = await self._get_package_type(package_dir_to_publish)
+
+ if not package_type:
+ return self.skip("Connector does not have a pyproject.toml file or setup.py file, skipping.")
+
+ result = await self._ensure_package_name_and_version(package_dir_to_publish, package_type)
+ if result:
+ return result
+
+ self.logger.info(
+ f"Uploading package {self.context.package_metadata.name} version {self.context.package_metadata.version} to {self.context.registry}..."
+ )
+
+ return await self._publish(package_dir_to_publish, package_type)
+
+ async def _ensure_package_name_and_version(self, package_dir_to_publish: Directory, package_type: PackageType) -> Optional[StepResult]:
+ """
+ Try to infer package name and version from the pyproject.toml file. If it is not present, we need to have the package name and version set.
+ Setup.py packages need to set package name and version as parameter.
+
+ Returns None if package name and version are set, otherwise a StepResult with a skip message.
+ """
+ if self.context.package_metadata.name and self.context.package_metadata.version:
+ return None
+
+ if package_type is not PackageType.POETRY:
+ return self.skip("Connector does not have a pyproject.toml file and version and package name is not set otherwise, skipping.")
+
+ inferred_package_metadata = await self._get_package_metadata_from_pyproject_toml(package_dir_to_publish)
+
+ if not inferred_package_metadata:
+ return self.skip(
+ "Connector does not have a pyproject.toml file which specifies package name and version and they are not set otherwise, skipping."
+ )
+
+ if not self.context.package_metadata.name:
+ self.context.package_metadata.name = inferred_package_metadata.name
+ if not self.context.package_metadata.version:
+ self.context.package_metadata.version = inferred_package_metadata.version
+
+ return None
+
+ async def _publish(self, package_dir_to_publish: Directory, package_type: PackageType) -> StepResult:
+ if package_type is PackageType.PIP:
+ return await self._pip_publish(package_dir_to_publish)
+ else:
+ return await self._poetry_publish(package_dir_to_publish)
+
+ async def _poetry_publish(self, package_dir_to_publish: Directory) -> StepResult:
+ python_registry_token = self.context.dagger_client.set_secret("python_registry_token", self.context.python_registry_token)
+ pyproject_toml = package_dir_to_publish.file(PYPROJECT_TOML_FILE_PATH)
+ pyproject_toml_content = await pyproject_toml.contents()
+ contents = tomli.loads(pyproject_toml_content)
+ # make sure package name and version are set to the configured one
+ contents["tool"]["poetry"]["name"] = self.context.package_metadata.name
+ contents["tool"]["poetry"]["version"] = self.context.package_metadata.version
+ # enforce consistent author
+ contents["tool"]["poetry"]["authors"] = ["Airbyte <contact@airbyte.io>"]
+ poetry_publish = (
+ self._get_base_container()
+ .with_secret_variable("PYTHON_REGISTRY_TOKEN", python_registry_token)
+ .with_directory("package", package_dir_to_publish)
+ .with_workdir("package")
+ .with_new_file(PYPROJECT_TOML_FILE_PATH, contents=tomli_w.dumps(contents))
+ .with_exec(["poetry", "config", "repositories.mypypi", self.context.registry])
+ .with_exec(sh_dash_c(["poetry config pypi-token.mypypi $PYTHON_REGISTRY_TOKEN"]))
+ .with_env_variable("CACHEBUSTER", str(uuid.uuid4()))
+ .with_exec(sh_dash_c(["poetry publish --build --repository mypypi -vvv --no-interaction"]))
+ )
+
+ return await self.get_step_result(poetry_publish)
+
+ async def _pip_publish(self, package_dir_to_publish: Directory) -> StepResult:
+ files = await package_dir_to_publish.entries()
+ pypi_username = self.context.dagger_client.set_secret("pypi_username", "__token__")
+ pypi_password = self.context.dagger_client.set_secret("pypi_password", self.context.python_registry_token)
+ metadata: Dict[str, str] = {
+ "name": str(self.context.package_metadata.name),
+ "version": str(self.context.package_metadata.version),
+ # Enforce consistent author
+ "author": "Airbyte",
+ "author_email": "contact@airbyte.io",
+ }
+ if "README.md" in files:
+ metadata["long_description"] = await package_dir_to_publish.file("README.md").contents()
+ metadata["long_description_content_type"] = "text/markdown"
+
+ config = configparser.ConfigParser()
+ config["metadata"] = metadata
+
+ setup_cfg_io = io.StringIO()
+ config.write(setup_cfg_io)
+ setup_cfg = setup_cfg_io.getvalue()
+
+ twine_upload = (
+ self._get_base_container()
+ .with_exec(sh_dash_c(["apt-get update", "apt-get install -y twine"]))
+ .with_directory("package", package_dir_to_publish)
+ .with_workdir("package")
+ # clear out setup.py metadata so setup.cfg is used
+ .with_exec(["sed", "-i", "/name=/d; /author=/d; /author_email=/d; /version=/d", SETUP_PY_FILE_PATH])
+ .with_new_file("setup.cfg", contents=setup_cfg)
+ .with_exec(["pip", "install", "--upgrade", "setuptools", "wheel"])
+ .with_exec(["python", SETUP_PY_FILE_PATH, "sdist", "bdist_wheel"])
+ .with_secret_variable("TWINE_USERNAME", pypi_username)
+ .with_secret_variable("TWINE_PASSWORD", pypi_password)
+ .with_env_variable("CACHEBUSTER", str(uuid.uuid4()))
+ .with_exec(["twine", "upload", "--verbose", "--repository-url", self.context.registry, "dist/*"])
+ )
+
+ return await self.get_step_result(twine_upload)
diff --git a/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py b/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py
index 8779fee5eab1b..f0a9a526f66f4 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py
@@ -130,6 +130,7 @@ def is_current_process_wrapped_by_dagger_run() -> bool:
help="Airbyte CI top-level command group.",
lazy_subcommands={
"connectors": "pipelines.airbyte_ci.connectors.commands.connectors",
+ "poetry": "pipelines.airbyte_ci.poetry.commands.poetry",
"format": "pipelines.airbyte_ci.format.commands.format_code",
"metadata": "pipelines.airbyte_ci.metadata.commands.metadata",
"test": "pipelines.airbyte_ci.test.commands.test",
diff --git a/airbyte-ci/connectors/pipelines/pipelines/consts.py b/airbyte-ci/connectors/pipelines/pipelines/consts.py
index 851578cc7a0ca..20bb7bd55211d 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/consts.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/consts.py
@@ -60,6 +60,8 @@
POETRY_CACHE_PATH = "/root/.cache/pypoetry"
STORAGE_DRIVER = "fuse-overlayfs"
TAILSCALE_AUTH_KEY = os.getenv("TAILSCALE_AUTH_KEY")
+SETUP_PY_FILE_PATH = "setup.py"
+DEFAULT_PYTHON_PACKAGE_REGISTRY_URL = "https://pypi.org/simple"
class CIContext(str, Enum):
diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/pip.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/pip.py
new file mode 100644
index 0000000000000..3fa6fa396c097
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/pip.py
@@ -0,0 +1,27 @@
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+
+from typing import Optional
+from urllib.parse import urlparse
+
+import requests
+
+
+def is_package_published(package_name: Optional[str], version: Optional[str], registry_url: str) -> bool:
+ """
+ Check if a package with a specific version is published on PyPI or Test PyPI.
+
+ :param package_name: The name of the package to check.
+ :param version: The version of the package.
+ :param registry_url: The base URL of the python registry to check against (e.g. PyPI or Test PyPI).
+ :return: True if the package is found with the specified version, False otherwise.
+ """
+ if not package_name or not version:
+ return False
+
+ parsed_registry_url = urlparse(registry_url)
+ base_url = f"{parsed_registry_url.scheme}://{parsed_registry_url.netloc}"
+
+ url = f"{base_url}/pypi/{package_name}/{version}/json"
+
+ response = requests.get(url)
+ return response.status_code == 200
diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/slack.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/slack.py
index affc981a60de0..619be4278b575 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/helpers/slack.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/slack.py
@@ -4,11 +4,11 @@
import json
-import requests # type: ignore
+import requests
from pipelines import main_logger
-def send_message_to_webhook(message: str, channel: str, webhook: str) -> dict:
+def send_message_to_webhook(message: str, channel: str, webhook: str) -> requests.Response:
payload = {"channel": f"#{channel}", "username": "Connectors CI/CD Bot", "text": message}
response = requests.post(webhook, data={"payload": json.dumps(payload)})
diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/contexts/python_registry_publish.py b/airbyte-ci/connectors/pipelines/pipelines/models/contexts/python_registry_publish.py
new file mode 100644
index 0000000000000..ee45760d2f9d9
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/models/contexts/python_registry_publish.py
@@ -0,0 +1,106 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+import os
+from dataclasses import dataclass
+from datetime import datetime
+from typing import Optional, Type
+
+from pipelines.airbyte_ci.connectors.context import PipelineContext
+from pipelines.airbyte_ci.connectors.publish.context import PublishConnectorContext
+from pipelines.consts import DEFAULT_PYTHON_PACKAGE_REGISTRY_URL
+
+
+@dataclass
+class PythonPackageMetadata:
+ name: Optional[str]
+ version: Optional[str]
+
+
+class PythonRegistryPublishContext(PipelineContext):
+ def __init__(
+ self,
+ python_registry_token: str,
+ package_path: str,
+ report_output_prefix: str,
+ is_local: bool,
+ git_branch: str,
+ git_revision: str,
+ ci_report_bucket: Optional[str] = None,
+ registry: str = DEFAULT_PYTHON_PACKAGE_REGISTRY_URL,
+ gha_workflow_run_url: Optional[str] = None,
+ dagger_logs_url: Optional[str] = None,
+ pipeline_start_timestamp: Optional[int] = None,
+ ci_context: Optional[str] = None,
+ ci_gcs_credentials: Optional[str] = None,
+ package_name: Optional[str] = None,
+ version: Optional[str] = None,
+ ) -> None:
+ self.python_registry_token = python_registry_token
+ self.registry = registry
+ self.package_path = package_path
+ self.package_metadata = PythonPackageMetadata(package_name, version)
+
+ pipeline_name = f"Publish PyPI {package_path}"
+
+ super().__init__(
+ pipeline_name=pipeline_name,
+ report_output_prefix=report_output_prefix,
+ ci_report_bucket=ci_report_bucket,
+ is_local=is_local,
+ git_branch=git_branch,
+ git_revision=git_revision,
+ gha_workflow_run_url=gha_workflow_run_url,
+ dagger_logs_url=dagger_logs_url,
+ pipeline_start_timestamp=pipeline_start_timestamp,
+ ci_context=ci_context,
+ ci_gcs_credentials=ci_gcs_credentials,
+ )
+
+ @classmethod
+ async def from_publish_connector_context(
+ cls: Type["PythonRegistryPublishContext"], connector_context: PublishConnectorContext
+ ) -> Optional["PythonRegistryPublishContext"]:
+ """
+ Create a PythonRegistryPublishContext from a ConnectorContext.
+
+ The metadata of the connector is read from the current workdir to capture changes that are not yet published.
+ If pypi is not enabled, this will return None.
+ """
+
+ current_metadata = connector_context.connector.metadata
+ connector_context.logger.info(f"Current metadata: {str(current_metadata)}")
+ if (
+ "remoteRegistries" not in current_metadata
+ or "pypi" not in current_metadata["remoteRegistries"]
+ or not current_metadata["remoteRegistries"]["pypi"]["enabled"]
+ ):
+ return None
+
+ version = current_metadata["dockerImageTag"]
+ if connector_context.pre_release:
+ # use current date as pre-release version
+ # we can't use the git revision because not all python registries allow local version identifiers. Public version identifiers must conform to PEP 440 and only allow digits.
+ release_candidate_tag = datetime.now().strftime("%Y%m%d%H%M")
+ version = f"{version}.dev{release_candidate_tag}"
+
+ pypi_context = cls(
+ python_registry_token=os.environ["PYTHON_REGISTRY_TOKEN"],
+ registry="https://test.pypi.org/legacy/", # TODO: go live
+ package_path=str(connector_context.connector.code_directory),
+ package_name=current_metadata["remoteRegistries"]["pypi"]["packageName"],
+ version=version,
+ ci_report_bucket=connector_context.ci_report_bucket,
+ report_output_prefix=connector_context.report_output_prefix,
+ is_local=connector_context.is_local,
+ git_branch=connector_context.git_branch,
+ git_revision=connector_context.git_revision,
+ gha_workflow_run_url=connector_context.gha_workflow_run_url,
+ dagger_logs_url=connector_context.dagger_logs_url,
+ pipeline_start_timestamp=connector_context.pipeline_start_timestamp,
+ ci_context=connector_context.ci_context,
+ ci_gcs_credentials=connector_context.ci_gcs_credentials,
+ )
+ pypi_context.dagger_client = connector_context.dagger_client
+ return pypi_context
diff --git a/airbyte-ci/connectors/pipelines/poetry.lock b/airbyte-ci/connectors/pipelines/poetry.lock
index e97b0e920b728..ded75cac60a8c 100644
--- a/airbyte-ci/connectors/pipelines/poetry.lock
+++ b/airbyte-ci/connectors/pipelines/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
[[package]]
name = "airbyte-connectors-base-images"
@@ -1150,16 +1150,6 @@ files = [
{file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
@@ -1987,7 +1977,6 @@ files = [
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
- {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@@ -1995,15 +1984,8 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
- {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
- {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
- {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
- {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@@ -2020,7 +2002,6 @@ files = [
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
- {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@@ -2028,7 +2009,6 @@ files = [
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
- {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
@@ -2299,6 +2279,42 @@ files = [
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
+[[package]]
+name = "tomli-w"
+version = "1.0.0"
+description = "A lil' TOML writer"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tomli_w-1.0.0-py3-none-any.whl", hash = "sha256:9f2a07e8be30a0729e533ec968016807069991ae2fd921a78d42f429ae5f4463"},
+ {file = "tomli_w-1.0.0.tar.gz", hash = "sha256:f463434305e0336248cac9c2dc8076b707d8a12d019dd349f5c1e382dd1ae1b9"},
+]
+
+[[package]]
+name = "types-requests"
+version = "2.28.2"
+description = "Typing stubs for requests"
+optional = false
+python-versions = "*"
+files = [
+ {file = "types-requests-2.28.2.tar.gz", hash = "sha256:398f88cd9302c796cb63d1021af2a1fb7ae507741a3d508edf8e0746d8c16a04"},
+ {file = "types_requests-2.28.2-py3-none-any.whl", hash = "sha256:c164696bfdce0123901165c5f097a6cc4f6326268c65815d4b6a57eacfec5e81"},
+]
+
+[package.dependencies]
+types-urllib3 = "<1.27"
+
+[[package]]
+name = "types-urllib3"
+version = "1.26.25.14"
+description = "Typing stubs for urllib3"
+optional = false
+python-versions = "*"
+files = [
+ {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"},
+ {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"},
+]
+
[[package]]
name = "typing-extensions"
version = "4.9.0"
@@ -2560,4 +2576,4 @@ multidict = ">=4.0"
[metadata]
lock-version = "2.0"
python-versions = "~3.10"
-content-hash = "0c7f7c9e18637d2cf9402f22c71502916cd4a1938111dd78eb7874f2c061c1fe"
+content-hash = "de011a00b912c9acd6d4f202c7cf54308010efc14a214ee25608712851306aa9"
diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml
index 226fccfde693e..7acd9b13b6f5f 100644
--- a/airbyte-ci/connectors/pipelines/pyproject.toml
+++ b/airbyte-ci/connectors/pipelines/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "pipelines"
-version = "3.5.3"
+version = "3.6.0"
description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines"
authors = ["Airbyte "]
@@ -27,6 +27,9 @@ segment-analytics-python = "^2.2.3"
pygit2 = "^1.13.1"
asyncclick = "^8.1.3.4"
certifi = "^2023.11.17"
+tomli = "^2.0.1"
+tomli-w = "^1.0.0"
+types-requests = "2.28.2"
[tool.poetry.group.dev.dependencies]
freezegun = "^1.2.2"
diff --git a/airbyte-ci/connectors/pipelines/tests/test_poetry/test_poetry_publish.py b/airbyte-ci/connectors/pipelines/tests/test_poetry/test_poetry_publish.py
new file mode 100644
index 0000000000000..ee345b4183809
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/tests/test_poetry/test_poetry_publish.py
@@ -0,0 +1,83 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+from unittest.mock import MagicMock
+
+import pytest
+import requests
+from dagger import Client, Platform
+from pipelines.airbyte_ci.connectors.publish import pipeline as publish_pipeline
+from pipelines.dagger.actions.python.poetry import with_poetry
+from pipelines.models.contexts.python_registry_publish import PythonPackageMetadata, PythonRegistryPublishContext
+from pipelines.models.steps import StepStatus
+
+pytestmark = [
+ pytest.mark.anyio,
+]
+
+
+@pytest.fixture
+def context(dagger_client: Client):
+ context = PythonRegistryPublishContext(
+ package_path="test",
+ version="0.2.0",
+ python_registry_token="test",
+ package_name="test",
+ registry="http://local_registry:8080/",
+ is_local=True,
+ git_branch="test",
+ git_revision="test",
+ report_output_prefix="test",
+ ci_report_bucket="test",
+ )
+ context.dagger_client = dagger_client
+ return context
+
+
+@pytest.mark.parametrize(
+ "package_path, package_name, expected_asset",
+ [
+ pytest.param(
+ "airbyte-integrations/connectors/source-apify-dataset",
+ "airbyte-source-apify-dataset",
+ "airbyte_source_apify_dataset-0.2.0-py3-none-any.whl",
+ id="setup.py project",
+ ),
+ pytest.param(
+ "airbyte-integrations/connectors/destination-duckdb",
+ "destination-duckdb",
+ "destination_duckdb-0.2.0-py3-none-any.whl",
+ id="poetry project",
+ ),
+ ],
+)
+async def test_run_poetry_publish(context: PythonRegistryPublishContext, package_path: str, package_name: str, expected_asset: str):
+ context.package_metadata = PythonPackageMetadata(package_name, "0.2.0")
+ context.package_path = package_path
+ pypi_registry = (
+ # need to use linux/amd64 because the pypiserver image is only available for that platform
+ context.dagger_client.container(platform=Platform("linux/amd64"))
+ .from_("pypiserver/pypiserver:v2.0.1")
+ .with_exec(["run", "-P", ".", "-a", "."])
+ .with_exposed_port(8080)
+ .as_service()
+ )
+
+ base_container = with_poetry(context).with_service_binding("local_registry", pypi_registry)
+ step = publish_pipeline.PublishToPythonRegistry(context)
+ step._get_base_container = MagicMock(return_value=base_container)
+ step_result = await step.run()
+ assert step_result.status == StepStatus.SUCCESS
+
+ # Query the registry to check that the package was published
+ tunnel = await context.dagger_client.host().tunnel(pypi_registry).start()
+ endpoint = await tunnel.endpoint(scheme="http")
+ list_url = f"{endpoint}/simple/"
+ list_response = requests.get(list_url)
+ assert list_response.status_code == 200
+ assert package_name in list_response.text
+ url = f"{endpoint}/simple/{package_name}"
+ response = requests.get(url)
+ assert response.status_code == 200
+ assert expected_asset in response.text
diff --git a/airbyte-ci/connectors/pipelines/tests/test_publish.py b/airbyte-ci/connectors/pipelines/tests/test_publish.py
index b7fe7d764d5fb..ce8913e648abd 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_publish.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_publish.py
@@ -2,8 +2,10 @@
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
import json
+import os
import random
from typing import List
+from unittest.mock import patch
import anyio
import pytest
@@ -153,6 +155,7 @@ def test_parse_spec_output_no_spec(self, publish_context):
(publish_pipeline, "PushConnectorImageToRegistry"),
(publish_pipeline, "PullConnectorImageFromRegistry"),
(publish_pipeline.steps, "run_connector_build"),
+ (publish_pipeline, "CheckPythonRegistryPackageDoesNotExist"),
]
@@ -333,3 +336,73 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist(
publish_pipeline.PullConnectorImageFromRegistry.return_value.run.assert_not_called()
publish_pipeline.UploadSpecToCache.return_value.run.assert_not_called()
publish_pipeline.MetadataUpload.return_value.run.assert_not_called()
+
+
+@pytest.mark.parametrize(
+ "pypi_enabled, pypi_package_does_not_exist_status, publish_step_status, expect_publish_to_pypi_called, expect_build_connector_called",
+ [
+ pytest.param(True, StepStatus.SUCCESS, StepStatus.SUCCESS, True, True, id="happy_path"),
+ pytest.param(False, StepStatus.SUCCESS, StepStatus.SUCCESS, False, True, id="pypi_disabled, skip all pypi steps"),
+ pytest.param(True, StepStatus.SKIPPED, StepStatus.SUCCESS, False, True, id="pypi_package_exists, skip publish_to_pypi"),
+ pytest.param(True, StepStatus.SUCCESS, StepStatus.FAILURE, True, False, id="publish_step_fails, abort"),
+ pytest.param(True, StepStatus.FAILURE, StepStatus.FAILURE, False, False, id="pypi_package_does_not_exist_fails, abort"),
+ ],
+)
+async def test_run_connector_python_registry_publish_pipeline(
+ mocker,
+ pypi_enabled,
+ pypi_package_does_not_exist_status,
+ publish_step_status,
+ expect_publish_to_pypi_called,
+ expect_build_connector_called,
+):
+
+ for module, to_mock in STEPS_TO_PATCH:
+ mocker.patch.object(module, to_mock, return_value=mocker.AsyncMock())
+
+ mocked_publish_to_python_registry = mocker.patch(
+ "pipelines.airbyte_ci.connectors.publish.pipeline.PublishToPythonRegistry", return_value=mocker.AsyncMock()
+ )
+
+ for step in [
+ publish_pipeline.MetadataValidation,
+ publish_pipeline.CheckConnectorImageDoesNotExist,
+ publish_pipeline.UploadSpecToCache,
+ publish_pipeline.MetadataUpload,
+ publish_pipeline.PushConnectorImageToRegistry,
+ publish_pipeline.PullConnectorImageFromRegistry,
+ ]:
+ step.return_value.run.return_value = mocker.Mock(name=f"{step.title}_result", status=StepStatus.SUCCESS)
+
+ mocked_publish_to_python_registry.return_value.run.return_value = mocker.Mock(
+ name="publish_to_python_registry_result", status=publish_step_status
+ )
+
+ publish_pipeline.CheckPythonRegistryPackageDoesNotExist.return_value.run.return_value = mocker.Mock(
+ name="python_registry_package_does_not_exist_result", status=pypi_package_does_not_exist_status
+ )
+
+ context = mocker.MagicMock(
+ ci_gcs_credentials="",
+ pre_release=False,
+ connector=mocker.MagicMock(
+ code_directory="path/to/connector",
+ metadata={"dockerImageTag": "1.2.3", "remoteRegistries": {"pypi": {"enabled": pypi_enabled, "packageName": "test"}}},
+ ),
+ )
+ semaphore = anyio.Semaphore(1)
+ with patch.dict(os.environ, {"PYTHON_REGISTRY_TOKEN": "test"}):
+ await publish_pipeline.run_connector_publish_pipeline(context, semaphore)
+ if expect_publish_to_pypi_called:
+ mocked_publish_to_python_registry.return_value.run.assert_called_once()
+    # assert that the first argument passed to mocked_publish_to_python_registry contains the things from the context
+ assert mocked_publish_to_python_registry.call_args.args[0].python_registry_token == "test"
+ assert mocked_publish_to_python_registry.call_args.args[0].package_metadata.name == "test"
+ assert mocked_publish_to_python_registry.call_args.args[0].package_metadata.version == "1.2.3"
+ assert mocked_publish_to_python_registry.call_args.args[0].registry == "https://test.pypi.org/legacy/"
+ assert mocked_publish_to_python_registry.call_args.args[0].package_path == "path/to/connector"
+ else:
+ mocked_publish_to_python_registry.return_value.run.assert_not_called()
+
+ if expect_build_connector_called:
+ publish_pipeline.steps.run_connector_build.assert_called_once()