Merge pull request #179 from opsmill/develop
Merge develop into stable ahead of 1.2.0 release
dgarros authored Dec 19, 2024
2 parents 079ab59 + da1d89a commit 5d4e2a2
Showing 17 changed files with 732 additions and 148 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/ci.yml
@@ -112,7 +112,7 @@ jobs:
./actionlint -color
shell: bash
env:
SHELLCHECK_OPTS: --exclude=SC2086 --exclude=SC2046 --exclude=SC2004
SHELLCHECK_OPTS: --exclude=SC2086 --exclude=SC2046 --exclude=SC2004 --exclude=SC2129


unit-tests:
Expand All @@ -123,6 +123,7 @@ jobs:
- "3.10"
- "3.11"
- "3.12"
- "3.13"
if: |
always() && !cancelled() &&
!contains(needs.*.result, 'failure') &&
.github/workflows/publish-pypi.yml
@@ -1,10 +1,32 @@
---
name: Publish Infrahub Python SDK
# yamllint disable rule:truthy
name: Publish Infrahub SDK Package

on: # yamllint disable rule:truthy
push:
tags:
- "v*"
on:
workflow_dispatch:
inputs:
runs-on:
description: "The OS to run the job on"
required: false
default: "ubuntu-22.04"
type: string
publish:
type: boolean
description: Whether to publish the package to Pypi
required: false
default: false
workflow_call:
inputs:
runs-on:
description: "The OS to run the job on"
required: false
default: "ubuntu-22.04"
type: string
publish:
type: boolean
description: Whether to publish the package to Pypi
required: false
default: false

jobs:
publish_to_pypi:
@@ -25,6 +47,8 @@ jobs:

- name: "Check out repository code"
uses: "actions/checkout@v4"
with:
submodules: true

- name: "Cache poetry venv"
uses: "actions/cache@v4"
@@ -47,4 +71,5 @@ jobs:
run: "ls -la dist/"

- name: "Poetry push PyPI"
if: ${{ inputs.publish }}
run: "poetry publish"
88 changes: 88 additions & 0 deletions .github/workflows/release.yml
@@ -0,0 +1,88 @@
---
# yamllint disable rule:truthy rule:line-length
name: New Release

on:
release:
types:
- published

jobs:
check_release:
runs-on: ubuntu-22.04
outputs:
is_prerelease: ${{ steps.release.outputs.is_prerelease }}
is_devrelease: ${{ steps.release.outputs.is_devrelease }}
version: ${{ steps.release.outputs.version }}
major_minor_version: ${{ steps.release.outputs.major_minor_version }}
latest_tag: ${{ steps.release.outputs.latest_tag }}
steps:
- name: "Check out repository code"
uses: "actions/checkout@v4"
with:
submodules: true

- name: "Set up Python"
uses: "actions/setup-python@v5"
with:
python-version: "3.12"

- name: "Install Poetry"
uses: "snok/install-poetry@v1"
with:
virtualenvs-create: true
virtualenvs-in-project: true
installer-parallel: true

- name: "Setup Python environment"
run: |
poetry config virtualenvs.create true --local
poetry env use 3.12
- name: "Install dependencies"
run: "poetry install --no-interaction --no-ansi"

- name: "Check prerelease type"
id: release
run: |
echo is_prerelease=$(poetry run python -c "from packaging.version import Version; print(int(Version('$(poetry version -s)').is_prerelease))") >> "$GITHUB_OUTPUT"
echo is_devrelease=$(poetry run python -c "from packaging.version import Version; print(int(Version('$(poetry version -s)').is_devrelease))") >> "$GITHUB_OUTPUT"
echo "version=$(poetry version -s)" >> "$GITHUB_OUTPUT"
echo major_minor_version=$(poetry run python -c "from packaging.version import Version; print(f\"{Version('$(poetry version -s)').major}.{Version('$(poetry version -s)').minor}\")") >> "$GITHUB_OUTPUT"
echo latest_tag=$(curl -L \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${{ github.token }}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${{ github.repository }}/releases/latest \
| jq -r '.tag_name') >> "$GITHUB_OUTPUT"
- name: Check tag version
if: github.event.release.tag_name != format('infrahub-v{0}', steps.release.outputs.version)
run: |
echo "Tag version does not match python project version"
exit 1
- name: Check prerelease and project version
if: github.event.release.prerelease == true && steps.release.outputs.is_prerelease == 0 && steps.release.outputs.is_devrelease == 0
run: |
echo "Cannot pre-release a non pre-release or non dev-release version (${{ steps.release.outputs.version }})"
exit 1
- name: Check release and project version
if: github.event.release.prerelease == false && (steps.release.outputs.is_prerelease == 1 || steps.release.outputs.is_devrelease == 1)
run: |
echo "Cannot release a pre-release or dev-release version (${{ steps.release.outputs.version }})"
exit 1
publish-pypi:
needs: check_release
uses: ./.github/workflows/publish-pypi.yml
secrets: inherit
with:
publish: true

update-submodule:
needs: check_release
uses: ./.github/workflows/update-submodule.yml
secrets: inherit
with:
version: ${{ github.ref_name }}
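
The `check_release` job above classifies the project version with `packaging.version.Version`. A minimal standalone sketch of that classification (illustration only, not part of the workflow):

```python
# Mirrors the "Check prerelease type" step: packaging treats rc/alpha/beta and
# .devN versions as pre-releases, and .devN versions additionally as dev releases.
from packaging.version import Version

for raw in ("1.2.0", "1.2.0rc1", "1.2.0.dev3"):
    v = Version(raw)
    print(
        raw,
        f"is_prerelease={int(v.is_prerelease)}",
        f"is_devrelease={int(v.is_devrelease)}",
        f"major_minor={v.major}.{v.minor}",
    )
```

The subsequent steps then fail the workflow whenever the GitHub release type (pre-release or not) disagrees with how the project version is classified, or when the release tag does not match the Poetry project version.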
45 changes: 45 additions & 0 deletions .github/workflows/update-submodule.yml
@@ -0,0 +1,45 @@
---
# yamllint disable rule:truthy
name: Trigger Submodule update

on:
workflow_dispatch:
inputs:
runs-on:
description: "The OS to run the job on"
required: false
default: "ubuntu-22.04"
type: string
version:
type: string
required: false
description: The string to extract semver from.
default: ''
workflow_call:
inputs:
runs-on:
description: "The OS to run the job on"
required: false
default: "ubuntu-22.04"
type: string
version:
type: string
required: false
description: The string to extract semver from.
default: ''

jobs:
trigger-submodule:
runs-on: ubuntu-22.04
steps:
- name: Checkout code
uses: actions/checkout@v4

- name: Trigger submodule update
run: |
echo "${{ inputs.version }}"
curl -X POST \
-H "Authorization: token ${{ secrets.GH_UPDATE_PACKAGE_OTTO }}" \
-H "Accept: application/vnd.github.v3+json" \
https://api.github.com/repos/opsmill/infrahub/dispatches \
-d "{\"event_type\":\"trigger-submodule-update\", \"client_payload\": {\"version\": \"${{ inputs.version }}\"}}"
2 changes: 1 addition & 1 deletion .yamllint.yml
@@ -14,6 +14,6 @@ rules:
# See https://github.com/prettier/prettier/pull/10926 or https://github.com/redhat-developer/vscode-yaml/issues/433
min-spaces-from-content: 1
line-length:
max: 120
max: 140
allow-non-breakable-words: true
allow-non-breakable-inline-mappings: false
15 changes: 10 additions & 5 deletions CHANGELOG.md
@@ -11,18 +11,23 @@ This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the chang

<!-- towncrier release notes start -->

## [1.1.0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.10.0) - 2024-11-28
## [1.2.0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.2.0) - 2024-12-19

### Added

- Added InfrahubClient.schema.wait_until_converged() which allows you to wait until the schema has converged across all Infrahub workers before proceeding with an operation. The InfrahubClient.schema.load() method has also been updated with a new parameter "wait_until_converged".
- Add batch feature, which uses threading, to the sync client ([#168](https://github.com/opsmill/infrahub-sdk-python/issues/168))
- Added InfrahubClient.schema.in_sync method to indicate if a specific branch is in sync across all worker types
- Added Python 3.13 to the list of supported versions

### Fixed

- CTL: `schema load` return a proper error message when authentication is missing or when the user doesn't have the permission to update the schema. ([#127](https://github.com/opsmill/infrahub-sdk-python/issues/127))
- CTL: List available transforms and generators if no name is provided ([#140](https://github.com/opsmill/infrahub-sdk-python/issues/140))
- Fix an issue with `infrahubctl menu load` that would fail while loading the menu

## [1.1.0rc0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.1.0rc0) - 2024-11-26
## [1.1.0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.10.0) - 2024-11-28

### Added

- Added InfrahubClient.schema.wait_until_converged() which allows you to wait until the schema has converged across all Infrahub workers before proceeding with an operation. The InfrahubClient.schema.load() method has also been updated with a new parameter "wait_until_converged".

### Fixed

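
The changelog entries above mention the new schema convergence helpers. A rough usage sketch (the `client` object, the `schemas=` keyword, and the argument-free `wait_until_converged()` call are assumptions based on the changelog wording, not confirmed signatures):

```python
# Sketch only: `client` is assumed to be an initialized async InfrahubClient,
# and the parameter names are inferred from the changelog entries above.
async def load_schema_and_wait(client, schemas: list[dict]) -> None:
    # Load the schema and block until every Infrahub worker has converged on it
    await client.schema.load(schemas=schemas, wait_until_converged=True)

    # Or wait explicitly after a load performed elsewhere
    await client.schema.wait_until_converged()
```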
49 changes: 46 additions & 3 deletions infrahub_sdk/batch.py
@@ -1,9 +1,10 @@
import asyncio
from collections.abc import AsyncGenerator, Awaitable
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass
from typing import Any, Callable, Optional
from typing import Any, Callable, Generator, Optional

from .node import InfrahubNode
from .node import InfrahubNode, InfrahubNodeSync


@dataclass
@@ -14,13 +15,32 @@ class BatchTask:
node: Optional[Any] = None


@dataclass
class BatchTaskSync:
task: Callable[..., Any]
args: tuple[Any, ...]
kwargs: dict[str, Any]
node: Optional[InfrahubNodeSync] = None

def execute(self, return_exceptions: bool = False) -> tuple[Optional[InfrahubNodeSync], Any]:
"""Executes the stored task."""
result = None
try:
result = self.task(*self.args, **self.kwargs)
except Exception as exc: # pylint: disable=broad-exception-caught
if return_exceptions:
return self.node, exc
raise exc

return self.node, result


async def execute_batch_task_in_pool(
task: BatchTask, semaphore: asyncio.Semaphore, return_exceptions: bool = False
) -> tuple[Optional[InfrahubNode], Any]:
async with semaphore:
try:
result = await task.task(*task.args, **task.kwargs)

except Exception as exc: # pylint: disable=broad-exception-caught
if return_exceptions:
return (task.node, exc)
@@ -64,3 +84,26 @@ async def execute(self) -> AsyncGenerator:
if isinstance(result, Exception) and not self.return_exceptions:
raise result
yield node, result


class InfrahubBatchSync:
def __init__(self, max_concurrent_execution: int = 5, return_exceptions: bool = False):
self._tasks: list[BatchTaskSync] = []
self.max_concurrent_execution = max_concurrent_execution
self.return_exceptions = return_exceptions

@property
def num_tasks(self) -> int:
return len(self._tasks)

def add(self, *args: Any, task: Callable[..., Any], node: Optional[Any] = None, **kwargs: Any) -> None:
self._tasks.append(BatchTaskSync(task=task, node=node, args=args, kwargs=kwargs))

def execute(self) -> Generator[tuple[Optional[InfrahubNodeSync], Any], None, None]:
with ThreadPoolExecutor(max_workers=self.max_concurrent_execution) as executor:
futures = [executor.submit(task.execute, return_exceptions=self.return_exceptions) for task in self._tasks]
for future in futures:
node, result = future.result()
if isinstance(result, Exception) and not self.return_exceptions:
raise result
yield node, result
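
A short usage sketch for the new `InfrahubBatchSync` driven directly with a plain callable (`fetch_device` is a hypothetical stand-in for any blocking piece of work; in practice the batch is normally obtained from the client via `create_batch`, shown further down):

```python
from infrahub_sdk.batch import InfrahubBatchSync

def fetch_device(name: str) -> str:
    # Hypothetical blocking task (HTTP call, database lookup, ...)
    return f"fetched {name}"

# Tasks run in a thread pool with at most 5 concurrent workers;
# with return_exceptions=True, failures are yielded instead of raised.
batch = InfrahubBatchSync(max_concurrent_execution=5, return_exceptions=True)
for device in ("atl1-edge1", "atl1-edge2"):
    batch.add(task=fetch_device, name=device)

for node, result in batch.execute():
    print(node, result)  # node is None here because no node was attached to the task
```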
15 changes: 11 additions & 4 deletions infrahub_sdk/client.py
@@ -23,7 +23,7 @@
import ujson
from typing_extensions import Self

from .batch import InfrahubBatch
from .batch import InfrahubBatch, InfrahubBatchSync
from .branch import (
BranchData,
InfrahubBranchManager,
@@ -1454,9 +1454,6 @@ def delete(self, kind: Union[str, type[SchemaTypeSync]], id: str, branch: Option
node = InfrahubNodeSync(client=self, schema=schema, branch=branch, data={"id": id})
node.delete()

def create_batch(self, return_exceptions: bool = False) -> InfrahubBatch:
raise NotImplementedError("This method hasn't been implemented in the sync client yet.")

def clone(self) -> InfrahubClientSync:
"""Return a cloned version of the client using the same configuration"""
return InfrahubClientSync(config=self.config)
@@ -1955,6 +1952,16 @@ def get(

return results[0]

def create_batch(self, return_exceptions: bool = False) -> InfrahubBatchSync:
"""Create a batch to execute multiple queries concurrently.
Executing the batch will be performed using a thread pool, meaning it cannot guarantee the execution order. It is not recommended to use such
a batch to manipulate objects that depend on each other.
"""
return InfrahubBatchSync(
max_concurrent_execution=self.max_concurrent_execution, return_exceptions=return_exceptions
)

def get_list_repositories(
self, branches: Optional[dict[str, BranchData]] = None, kind: str = "CoreGenericRepository"
) -> dict[str, RepositoryData]:
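
With `create_batch` now implemented on the sync client, usage could look roughly like this (a sketch: `client` is assumed to be an initialized `InfrahubClientSync`, and the `name__value` filter keyword forwarded to `client.get` is an assumption about the wider SDK API rather than part of this diff):

```python
# Sketch only: `client` is an already-initialized InfrahubClientSync and
# "InfraDevice" a kind assumed to exist in the target schema.
batch = client.create_batch(return_exceptions=False)

for name in ("atl1-edge1", "atl1-edge2"):
    # client.get is shown in the hunk above; extra kwargs are forwarded by the batch
    batch.add(task=client.get, kind="InfraDevice", name__value=name)

# Results stream back as threads finish, so ordering is not guaranteed
for node, result in batch.execute():
    print(result)
```

Because execution order is not guaranteed, objects that depend on each other should not be created through the same batch, as the docstring above notes.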
13 changes: 7 additions & 6 deletions infrahub_sdk/ctl/repository.py
@@ -1,12 +1,14 @@
from pathlib import Path
from typing import Optional

import typer
import yaml
from pydantic import ValidationError
from rich.console import Console

from infrahub_sdk.ctl.client import initialize_client

from ..async_typer import AsyncTyper
from ..ctl.client import initialize_client
from ..ctl.exceptions import FileNotValidError
from ..ctl.utils import init_logging
from ..graphql import Mutation
@@ -65,7 +67,7 @@ async def add(
name: str,
location: str,
description: str = "",
username: str = "",
username: Optional[str] = None,
password: str = "",
commit: str = "",
read_only: bool = False,
@@ -88,10 +90,9 @@

client = initialize_client()

if username:
credential = await client.create(kind="CorePasswordCredential", name=name, username=username, password=password)
await credential.save()
input_data["data"]["credential"] = {"id": credential.id}
credential = await client.create(kind="CorePasswordCredential", name=name, username=username, password=password)
await credential.save(allow_upsert=True)
input_data["data"]["credential"] = {"id": credential.id}

query = Mutation(
mutation="CoreReadOnlyRepositoryCreate" if read_only else "CoreRepositoryCreate",
2 changes: 1 addition & 1 deletion infrahub_sdk/ctl/schema.py
@@ -129,7 +129,7 @@ async def load(
for schema_file in schemas_data:
console.print(f"[green] schema '{schema_file.location}' loaded successfully")
else:
console.print("[green] The schema in Infrahub was is already up to date, no changes were required")
console.print("[green] The schema in Infrahub was already up to date, no changes were required")

console.print(f"[green] {len(schemas_data)} {schema_definition} processed in {loading_time:.3f} seconds.")
