diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
new file mode 100644
index 0000000..eedd3fc
--- /dev/null
+++ b/.github/workflows/docs.yml
@@ -0,0 +1,29 @@
+name: Deploy Sphinx documentation to Pages
+
+# Runs on pushes targeting the default branch
+on:
+  push:
+    branches: [main]
+# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
+permissions:
+  contents: read
+  pages: write
+  id-token: write
+
+# Allow one concurrent deployment
+concurrency:
+  group: "pages"
+  cancel-in-progress: true
+
+jobs:
+  pages:
+    runs-on: ubuntu-20.04
+    environment:
+      name: github-pages
+      url: ${{ steps.deployment.outputs.page_url }}
+    permissions:
+      pages: write
+      id-token: write
+    steps:
+      - id: deployment
+        uses: sphinx-notes/pages@v3
diff --git a/README.md b/README.md
index 3aa4897..d13d247 100644
--- a/README.md
+++ b/README.md
@@ -186,8 +186,8 @@ for the latest changes.
### Contributing Guide
-__________________
+______________________
All contributions, bug reports, bug fixes, documentation improvements, enhancements are welcome.
-A detailed overview an how to contribute can be found in the [Contributing Guide](https://github.com/astronomer/astro-provider-anyscale/blob/main/CONTRIBUTING.rst)
+A detailed overview on how to contribute can be found in the [Contributing Guide](https://github.com/astronomer/astro-provider-anyscale/blob/main/docs/CONTRIBUTING.rst)
diff --git a/anyscale_provider/hooks/anyscale.py b/anyscale_provider/hooks/anyscale.py
index 8b98d1d..7ab9049 100644
--- a/anyscale_provider/hooks/anyscale.py
+++ b/anyscale_provider/hooks/anyscale.py
@@ -16,10 +16,6 @@ class AnyscaleHook(BaseHook):
"""
This hook handles authenticating and making calls to the Anyscale SDK
- .. seealso::
- For more information on how to use this hook, take a look at the guide:
- :ref:`howto/hook:AnyscaleHook`
-
:param conn_id: Optional. The connection ID to use for Anyscale. Defaults to "anyscale_default".
"""
diff --git a/anyscale_provider/operators/anyscale.py b/anyscale_provider/operators/anyscale.py
index e0ffde3..c6bdf7a 100644
--- a/anyscale_provider/operators/anyscale.py
+++ b/anyscale_provider/operators/anyscale.py
@@ -24,10 +24,6 @@ class SubmitAnyscaleJob(BaseOperator):
with the necessary parameters to define and configure the job, and provides mechanisms
for job submission, status tracking, and handling job outcomes.
- .. seealso::
- For more information on how to use this operator, take a look at the guide:
- :ref:`howto/operator:SubmitAnyscaleJobOperator`
-
:param conn_id: Required. The connection ID for Anyscale.
:param entrypoint: Required. Command that will be run to execute the job, e.g., `python main.py`.
:param name: Optional. Name of the job. Multiple jobs can be submitted with the same name.
@@ -111,6 +107,12 @@ def __init__(
self.job_id: str | None = None
def on_kill(self) -> None:
+ """
+ Terminate the Anyscale job if the task is killed.
+
+ This method will be called when the task is killed, and it sends a termination
+ request for the currently running job.
+ """
if self.job_id is not None:
self.hook.terminate_job(self.job_id, 5)
self.log.info("Termination request received. Submitted request to terminate the anyscale job.")
@@ -122,6 +124,16 @@ def hook(self) -> AnyscaleHook:
return AnyscaleHook(conn_id=self.conn_id)
def execute(self, context: Context) -> None:
+ """
+ Execute the job submission to Anyscale.
+
+ This method submits the job to Anyscale and handles its initial status.
+ It defers the execution to a trigger if the job is still running or starting.
+
+ :param context: The Airflow context.
+ :return: None. The outcome is handled either synchronously or by ``execute_complete`` after the trigger fires.
+ """
+
job_params: dict[str, Any] = {
"entrypoint": self.entrypoint,
"name": self.name,
@@ -137,6 +149,7 @@ def execute(self, context: Context) -> None:
"project": self.project,
"max_retries": self.max_retries,
}
+
self.log.info(f"Using Anyscale version {anyscale.__version__}")
# Submit the job to Anyscale
job_config = JobConfig(**job_params)
@@ -169,6 +182,16 @@ def execute(self, context: Context) -> None:
raise Exception(f"Unexpected state `{current_state}` for job_id `{self.job_id}`.")
def execute_complete(self, context: Context, event: Any) -> None:
+ """
+ Complete the execution of the job based on the trigger event.
+
+ This method is called when the trigger fires and provides the final status
+ of the job. It raises an exception if the job failed.
+
+ :param context: The Airflow context.
+ :param event: The event data from the trigger.
+ :return: None
+ """
current_job_id = event["job_id"]
if event["state"] == JobState.FAILED:
@@ -186,10 +209,6 @@ class RolloutAnyscaleService(BaseOperator):
configurations and options. It ensures the service is rolled out according to the
specified parameters and handles the deployment lifecycle.
- .. seealso::
- For more information on how to use this operator, take a look at the guide:
- :ref:`howto/operator:RolloutAnyscaleServiceOperator`
-
:param conn_id: Required. The connection ID for Anyscale.
:param name: Required. Unique name of the service.
:param image_uri: Optional. URI of an existing image. Exclusive with `containerfile`.
@@ -299,12 +318,27 @@ def hook(self) -> AnyscaleHook:
return AnyscaleHook(conn_id=self.conn_id)
def on_kill(self) -> None:
+ """
+ Terminate the Anyscale service rollout if the task is killed.
+
+ This method will be called when the task is killed, and it sends a termination
+ request for the currently running service rollout.
+ """
if self.name is not None:
self.hook.terminate_service(self.name, 5)
self.log.info("Termination request received. Submitted request to terminate the anyscale service rollout.")
return
def execute(self, context: Context) -> str | None:
+ """
+ Execute the service rollout to Anyscale.
+
+ This method deploys the service to Anyscale with the provided configuration
+ and parameters. It defers the execution to a trigger if the service is in progress.
+
+ :param context: The Airflow context.
+ :return: The service ID if the rollout is successfully initiated, or None if the rollout is deferred.
+ """
service_params = {
"name": self.name,
"image_uri": self.image_uri,
@@ -354,6 +388,16 @@ def execute(self, context: Context) -> str | None:
)
def execute_complete(self, context: Context, event: Any) -> None:
+ """
+ Complete the execution of the service rollout based on the trigger event.
+
+ This method is called when the trigger fires and provides the final status
+ of the service rollout. It raises an exception if the rollout failed.
+
+ :param context: The Airflow context.
+ :param event: The event data from the trigger.
+ :return: None
+ """
service_name = event["service_name"]
state = event["state"]
diff --git a/anyscale_provider/triggers/anyscale.py b/anyscale_provider/triggers/anyscale.py
index 551d302..58d6ca7 100644
--- a/anyscale_provider/triggers/anyscale.py
+++ b/anyscale_provider/triggers/anyscale.py
@@ -20,10 +20,6 @@ class AnyscaleJobTrigger(BaseTrigger):
yields events based on the job's status. It handles timeouts and errors during
the polling process.
- .. seealso::
- For more information on how to use this trigger, take a look at the guide:
- :ref:`howto/trigger:AnyscaleJobTrigger`
-
:param conn_id: Required. The connection ID for Anyscale.
:param job_id: Required. The ID of the job to monitor.
:param poll_interval: Optional. Interval in seconds between status checks. Defaults to 60 seconds.
@@ -38,10 +34,19 @@ def __init__(self, conn_id: str, job_id: str, poll_interval: float = 60, fetch_l
@cached_property
def hook(self) -> AnyscaleHook:
- """Return an instance of the AnyscaleHook."""
+ """
+ Return an instance of the AnyscaleHook.
+
+ :return: AnyscaleHook instance configured with the provided connection ID.
+ """
return AnyscaleHook(conn_id=self.conn_id)
def serialize(self) -> tuple[str, dict[str, Any]]:
+ """
+ Serialize the trigger configuration for persistence.
+
+ :return: A tuple containing the path to the trigger class and a dictionary of the trigger's parameters.
+ """
return (
"anyscale_provider.triggers.anyscale.AnyscaleJobTrigger",
{
@@ -52,7 +57,11 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
)
async def run(self) -> AsyncIterator[TriggerEvent]:
+ """
+ Monitor the job status periodically until a terminal state is reached or an error occurs.
+
+ :yield: TriggerEvent indicating the current status of the job.
+ """
try:
# Loop until reach the terminal state
# TODO: Make this call async
@@ -90,6 +99,12 @@ async def run(self) -> AsyncIterator[TriggerEvent]:
)
def _is_terminal_state(self, job_id: str) -> bool:
+ """
+ Check if the job has reached a terminal state.
+
+ :param job_id: The ID of the job to check the status for.
+ :return: True if the job is in a terminal state, False otherwise.
+ """
job_state = self.hook.get_job_status(job_id).state
self.log.info(f"Current job state for {job_id} is: {job_state}")
return job_state not in (JobState.STARTING, JobState.RUNNING)
@@ -103,10 +118,6 @@ class AnyscaleServiceTrigger(BaseTrigger):
and yields events based on the service's status. It handles timeouts and errors
during the monitoring process.
- .. seealso::
- For more information on how to use this trigger, take a look at the guide:
- :ref:`howto/trigger:AnyscaleServiceTrigger`
-
:param conn_id: Required. The connection ID for Anyscale.
:param service_name: Required. The ID of the service to monitor.
:param expected_state: Required. The expected final state of the service.
@@ -130,10 +141,19 @@ def __init__(
@cached_property
def hook(self) -> AnyscaleHook:
- """Return an instance of the AnyscaleHook."""
+ """
+ Return an instance of the AnyscaleHook.
+
+ :return: AnyscaleHook instance configured with the provided connection ID.
+ """
return AnyscaleHook(conn_id=self.conn_id)
def serialize(self) -> tuple[str, dict[str, Any]]:
+ """
+ Serialize the trigger configuration for persistence.
+
+ :return: A tuple containing the path to the trigger class and a dictionary of the trigger's parameters.
+ """
return (
"anyscale_provider.triggers.anyscale.AnyscaleServiceTrigger",
{
@@ -146,6 +166,11 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
)
async def run(self) -> AsyncIterator[TriggerEvent]:
+ """
+ Monitor the service status periodically until the expected state is reached or an error occurs.
+
+ :yield: TriggerEvent indicating the current status of the service.
+ """
self.log.info(
f"Monitoring service {self.service_name} every {self.poll_interval} seconds to reach {self.expected_state}"
)
@@ -181,6 +206,12 @@ async def run(self) -> AsyncIterator[TriggerEvent]:
)
def _get_current_state(self, service_name: str) -> str:
+ """
+ Get the current status of the specified service.
+
+ :param service_name: The name of the service to check the status for.
+ :return: The current status of the service.
+ """
service_status = self.hook.get_service_status(service_name)
if self.canary_percent is None or self.canary_percent == 100.0:
@@ -192,6 +223,12 @@ def _get_current_state(self, service_name: str) -> str:
return str(service_status.state)
def _check_current_state(self, service_name: str) -> bool:
+ """
+ Check if the service is still in a transitional state.
+
+ :param service_name: The name of the service to check the status for.
+ :return: True if the service is in a transitional state, False otherwise.
+ """
service_state = self._get_current_state(service_name)
self.log.info(f"Current service state for {service_name} is: {service_state}")
return service_state in (
diff --git a/CHANGELOG.rst b/docs/CHANGELOG.rst
similarity index 100%
rename from CHANGELOG.rst
rename to docs/CHANGELOG.rst
diff --git a/CODE_OF_CONDUCT.md b/docs/CODE_OF_CONDUCT.rst
similarity index 86%
rename from CODE_OF_CONDUCT.md
rename to docs/CODE_OF_CONDUCT.rst
index b3378c6..353ec59 100644
--- a/CODE_OF_CONDUCT.md
+++ b/docs/CODE_OF_CONDUCT.rst
@@ -1,6 +1,8 @@
-# Contributor Covenant Code of Conduct
+Contributor Covenant Code of Conduct
+====================================
-## Our Pledge
+Our Pledge
+----------
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
@@ -12,7 +14,8 @@ and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
-## Our Standards
+Our Standards
+-------------
Examples of behavior that contributes to a positive environment for our
community include:
@@ -36,7 +39,8 @@ Examples of unacceptable behavior include:
* Other conduct which could reasonably be considered inappropriate in a
professional setting
-## Enforcement Responsibilities
+Enforcement Responsibilities
+----------------------------
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
@@ -48,7 +52,8 @@ comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
-## Scope
+Scope
+-----
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
@@ -56,7 +61,8 @@ Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
-## Enforcement
+Enforcement
+-----------
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
@@ -66,7 +72,8 @@ All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
-## Enforcement Guidelines
+Enforcement Guidelines
+----------------------
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
@@ -106,23 +113,22 @@ Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
-standards, including sustained inappropriate behavior, harassment of an
+standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
-## Attribution
+Attribution
+-----------
-This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+This Code of Conduct is adapted from the `Contributor Covenant <https://www.contributor-covenant.org/>`_,
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct/.
-Community Impact Guidelines were inspired by [Mozilla's code of conduct
-enforcement ladder](https://github.com/mozilla/inclusion).
+Community Impact Guidelines were inspired by `Mozilla's code of conduct
+enforcement ladder <https://github.com/mozilla/inclusion>`_.
-[homepage]: https://www.contributor-covenant.org/
-
-For answers to common questions about this code of conduct, see the FAQ at
-https://www.contributor-covenant.org/faq/. Translations are available at
+For answers to common questions about this code of conduct, see the `FAQ <https://www.contributor-covenant.org/faq/>`_.
+Translations are available at
https://www.contributor-covenant.org/translations/.
diff --git a/CONTRIBUTING.rst b/docs/CONTRIBUTING.rst
similarity index 85%
rename from CONTRIBUTING.rst
rename to docs/CONTRIBUTING.rst
index 9318e2c..f682d62 100644
--- a/CONTRIBUTING.rst
+++ b/docs/CONTRIBUTING.rst
@@ -3,11 +3,11 @@ Contributions
Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great.
-Please note that this project is released with a `Contributor Code of Conduct `_.
+Please note that this project is released with a `Contributor Code of Conduct <./docs/CODE_OF_CONDUCT>`_.
By participating in this project you agree to abide by its terms.
Overview
-========
+--------
To contribute to the **Astro Anyscale** project:
@@ -17,10 +17,10 @@ To contribute to the **Astro Anyscale** project:
#. Once developments are complete on your feature branch, request a review and it will be merged once approved.
Test Changes Locally
-====================
+--------------------
Pre-requisites
---------------
+~~~~~~~~~~~~~~
* pytest
@@ -29,14 +29,14 @@ Pre-requisites
pip install pytest
Run tests
----------
+~~~~~~~~~
All tests are inside ``./tests`` directory.
- Just run ``pytest filepath+filename`` to run the tests.
Static Code Checks
-==================
+------------------
We check our code quality via static code checks. The static code checks in astro-provider-anyscale are used to verify
that the code meets certain quality standards. All the static code checks can be run through pre-commit hooks.
@@ -45,7 +45,7 @@ Your code must pass all the static code checks in the CI in order to be eligible
The easiest way to make sure your code is good before pushing is to use pre-commit checks locally.
Pre-Commit
-----------
+~~~~~~~~~~
We use pre-commit to run a number of checks on the code before committing. To install pre-commit, run the following from
your cloned ``astro-provider-anyscale`` directory:
@@ -65,8 +65,20 @@ For details on the pre-commit configuration, refer to the `pre-commit config fil
For more details on each hook and additional configuration options, refer to the official pre-commit documentation: https://pre-commit.com/hooks.html
+Writing Docs
+------------
+
+You can build and serve the docs locally by running the following:
+
+.. code-block:: bash
+
+ hatch run docs:serve
+
+
+This runs the docs server in a virtual environment with the right dependencies. The first run may take longer while the environment is set up; subsequent runs are quick.
+
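+To produce a one-off static HTML build instead (the ``build`` script in the ``docs`` environment defined in ``pyproject.toml``), run:
+
+.. code-block:: bash
+
+    hatch run docs:build
+
+The HTML output is written to ``docs/_build``.
+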
Building
-========
+--------
We use `hatch `_ to build the project. To build the project, run:
@@ -75,7 +87,7 @@ We use `hatch `_ to build the project. To build t
hatch build
Releasing
-=========
+---------
We use GitHub actions to create and deploy new releases. To create a new release, first create a new version using:
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..d4bb2cb
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS ?=
+SPHINXBUILD ?= sphinx-build
+SOURCEDIR = .
+BUILDDIR = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/api/anyscale_provider.hooks.rst b/docs/api/anyscale_provider.hooks.rst
new file mode 100644
index 0000000..3e42fca
--- /dev/null
+++ b/docs/api/anyscale_provider.hooks.rst
@@ -0,0 +1,21 @@
+anyscale\_provider.hooks package
+================================
+
+Submodules
+----------
+
+anyscale\_provider.hooks.anyscale module
+----------------------------------------
+
+.. automodule:: anyscale_provider.hooks.anyscale
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Module contents
+---------------
+
+.. automodule:: anyscale_provider.hooks
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/docs/api/anyscale_provider.operators.rst b/docs/api/anyscale_provider.operators.rst
new file mode 100644
index 0000000..54248bb
--- /dev/null
+++ b/docs/api/anyscale_provider.operators.rst
@@ -0,0 +1,21 @@
+anyscale\_provider.operators package
+====================================
+
+Submodules
+----------
+
+anyscale\_provider.operators.anyscale module
+--------------------------------------------
+
+.. automodule:: anyscale_provider.operators.anyscale
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Module contents
+---------------
+
+.. automodule:: anyscale_provider.operators
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/docs/api/anyscale_provider.rst b/docs/api/anyscale_provider.rst
new file mode 100644
index 0000000..0879ca0
--- /dev/null
+++ b/docs/api/anyscale_provider.rst
@@ -0,0 +1,20 @@
+anyscale\_provider package
+==========================
+
+Subpackages
+-----------
+
+.. toctree::
+ :maxdepth: 4
+
+ anyscale_provider.hooks
+ anyscale_provider.operators
+ anyscale_provider.triggers
+
+Module contents
+---------------
+
+.. automodule:: anyscale_provider
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/docs/api/anyscale_provider.triggers.rst b/docs/api/anyscale_provider.triggers.rst
new file mode 100644
index 0000000..0327984
--- /dev/null
+++ b/docs/api/anyscale_provider.triggers.rst
@@ -0,0 +1,21 @@
+anyscale\_provider.triggers package
+===================================
+
+Submodules
+----------
+
+anyscale\_provider.triggers.anyscale module
+-------------------------------------------
+
+.. automodule:: anyscale_provider.triggers.anyscale
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Module contents
+---------------
+
+.. automodule:: anyscale_provider.triggers
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..90b0397
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,33 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Project information -----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
+
+project = "astro-provider-anyscale"
+copyright = "2024, Astronomer"
+author = "Astronomer"
+release = "1.0.0"
+
+# -- General configuration ---------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
+
+extensions = [
+ "sphinx.ext.autodoc",
+ "sphinx.ext.autosummary",
+]
+
+add_module_names = False
+autodoc_mock_imports = ["airflow"]
+autoapi_dirs = ["../anyscale_provider"]
+autoapi_ignore = ["*/tests/*"]
+templates_path = ["_templates"]
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "**/tests/*"]
+
+# -- Options for HTML output -------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
+
+html_theme = "pydata_sphinx_theme"
+html_title = "astro-provider-anyscale"
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..93012fa
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,98 @@
+Welcome to astro-provider-anyscale's documentation!
+===================================================
+
+.. toctree::
+ :maxdepth: 1
+ :caption: Contents:
+
+ Home
+ API Reference
+ Contributing
+ Code of Conduct
+
+This repository provides a set of tools for integrating Anyscale with Apache Airflow, enabling the orchestration of Anyscale jobs and services within Airflow workflows. It includes a custom hook, two operators, and two triggers specifically designed for managing and monitoring Anyscale jobs and services.
+
+Components
+----------
+
+Hook
+~~~~
+- **AnyscaleHook**: Facilitates communication between Airflow and Anyscale. It uses the Anyscale API to interact with the Anyscale platform, providing methods to submit jobs, query their status, and manage services.
+
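+A minimal sketch of using the hook directly (the hook class and the ``get_job_status`` method come from this provider; the connection ID and job ID below are illustrative placeholders):
+
+.. code-block:: python
+
+    from anyscale_provider.hooks.anyscale import AnyscaleHook
+
+    # Authenticates against Anyscale using the API token stored in the Airflow connection
+    hook = AnyscaleHook(conn_id="anyscale_conn")
+
+    # Query the current state of a previously submitted job (illustrative job ID)
+    state = hook.get_job_status("my-job-id").state
+    print(state)
+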
+Operators
+~~~~~~~~~
+- **SubmitAnyscaleJob**: This operator submits a job to Anyscale. It takes configuration parameters for the job, such as the entry point, image URI, and compute configuration. The operator uses ``AnyscaleHook`` to handle the submission process.
+- **RolloutAnyscaleService**: Similar to the job submission operator, this operator is designed to manage services on Anyscale. It can be used to deploy new services or update existing ones, leveraging ``AnyscaleHook`` for all interactions with the Anyscale API.
+
+Triggers
+~~~~~~~~
+- **AnyscaleJobTrigger**: Monitors the status of asynchronous jobs submitted via the ``SubmitAnyscaleJob`` operator. It ensures that the Airflow task waits until the job is completed before moving forward in the DAG.
+- **AnyscaleServiceTrigger**: Works in a similar fashion to the ``AnyscaleJobTrigger`` but is focused on service rollout processes. It checks the status of the service being deployed or updated and returns control to Airflow upon completion.
+
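+To show how these pieces fit together, here is a minimal sketch of a DAG that submits a job. Only the documented ``conn_id``, ``name``, and ``entrypoint`` parameters are shown; the DAG settings are illustrative, and complete runnable examples appear under Code samples below.
+
+.. code-block:: python
+
+    from datetime import datetime
+
+    from airflow import DAG
+    from anyscale_provider.operators.anyscale import SubmitAnyscaleJob
+
+    # Illustrative DAG settings; adjust for your deployment
+    with DAG(
+        dag_id="anyscale_job_example",
+        start_date=datetime(2024, 1, 1),
+        schedule=None,
+        catchup=False,
+    ):
+        # conn_id refers to the Airflow connection described under
+        # "Airflow Connection Configuration" below
+        submit_job = SubmitAnyscaleJob(
+            task_id="submit_job",
+            conn_id="anyscale_conn",
+            name="hello-anyscale",
+            entrypoint="python main.py",
+        )
+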
+Configuration Details for Anyscale Integration
+----------------------------------------------
+
+To integrate Airflow with Anyscale, you will need to provide several configuration details:
+
+- **Anyscale API Token**: Obtain your API token either with the Anyscale CLI or through the `Anyscale console <https://console.anyscale.com/>`_.
+
+- **Compute Config (optional)**: This ID specifies the machines that will execute your Ray script. You can either:
+
+ - Dynamically provide this via the ``compute_config`` input parameter, or
+ - Create a compute configuration in Anyscale and use the resulting ID in the ``compute_config_id`` parameter.
+
+- **Image URI**: Specify the Docker image you would like your operator to use. Make sure the image is accessible within your Anyscale account. Alternatively, you can provide a containerfile that will be used to build the image dynamically.
+
+Usage
+-----
+
+Install the Anyscale provider using the command below:
+
+.. code-block:: sh
+
+ pip install astro-provider-anyscale
+
+Airflow Connection Configuration
+--------------------------------
+
+To integrate Airflow with Anyscale, configure an Airflow connection with a unique name and set its password field to the API token obtained from the Anyscale console.
+
+1. **Access Airflow Web UI:**
+ - Open the Airflow web interface and log in using your Airflow credentials.
+
+2. **Create a New Connection in Airflow:**
+ - Go to the "Admin" tab and select "Connections" from the dropdown menu.
+ - Click the "Add a new record" button to create a new connection.
+
+3. **Configure the Connection:**
+
+ - **Conn Id:** Enter a unique identifier for the connection, e.g., ``anyscale_conn``.
+ - **Conn Type:** Select ``Anyscale``
+ - **Password:** Paste the API token you copied from the Anyscale console.
+
+4. **Save the Connection:**
+ - After filling in the required details, click the "Save" button at the bottom of the form to save the new connection.
+
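+As an alternative to the UI, the same connection can be supplied through an environment variable using Airflow's standard ``AIRFLOW_CONN_<CONN_ID>`` mechanism. This is a sketch; the connection ID matches the example above, and the token is a placeholder:
+
+.. code-block:: sh
+
+    # Connection ID "anyscale_conn"; the API token is passed in the password field of the URI
+    export AIRFLOW_CONN_ANYSCALE_CONN='anyscale://:<your-api-token>@'
+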
+Code samples
+------------
+
+The script below is an example of how to configure and use the ``SubmitAnyscaleJob`` operator within an Airflow DAG:
+
+.. literalinclude:: ../example_dags/anyscale_job.py
+
+
+The script below uses the ``RolloutAnyscaleService`` operator to deploy a service on Anyscale:
+
+.. literalinclude:: ../example_dags/anyscale_service.py
+
+Changelog
+---------
+
+We follow `Semantic Versioning <https://semver.org>`_ for releases.
+Check `CHANGELOG.rst `_
+for the latest changes.
+
+License
+-------
+
+`Apache License 2.0 `_
diff --git a/docs/make.bat b/docs/make.bat
new file mode 100644
index 0000000..51d3652
--- /dev/null
+++ b/docs/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=.
+set BUILDDIR=_build
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.https://www.sphinx-doc.org/
+ exit /b 1
+)
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/netlify.toml b/netlify.toml
new file mode 100644
index 0000000..18bfb5c
--- /dev/null
+++ b/netlify.toml
@@ -0,0 +1,34 @@
+# Settings in the [build] context are global and are applied to
+# all contexts unless otherwise overridden by more specific contexts.
+[build]
+ # Directory to change to before starting a build.
+ base = "docs/"
+
+ # Directory that contains the deploy-ready HTML files and
+ # assets generated by the build. This is relative to the base
+ # directory.
+
+ publish = "_build/dirhtml"
+
+ # Default build command.
+ command = "make dirhtml"
+ environment = { PYTHON_VERSION = "3.8" }
+
+# Production context: all deploys from the Production branch
+# set in your site’s Branches settings in the UI will inherit
+# these settings.
+[context.production]
+ publish = "_build/dirhtml"
+ command = "make dirhtml"
+
+# Deploy Preview context: all deploys generated from
+# a pull/merge request will inherit these settings.
+[context.deploy-preview]
+ publish = "_build/dirhtml"
+
+# The following redirect is intended for use with most SPAs
+# that handle routing internally.
+[[redirects]]
+ from = "/*"
+ to = "/index.html"
+ status = 200
diff --git a/pyproject.toml b/pyproject.toml
index ccc7573..2a3f00f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -49,6 +49,13 @@ include = ["/anyscale_provider"]
[tool.hatch.build.targets.wheel]
packages = ["/anyscale_provider"]
+docs = [
+ "sphinx",
+ "pydata-sphinx-theme",
+ "sphinx-autobuild",
+ "sphinx-autoapi",
+]
+
######################################
# TESTING
######################################
@@ -82,6 +89,24 @@ test-cov = 'sh scripts/test/unit_cov.sh'
test-integration = 'sh scripts/test/integration_test.sh'
static-check = "pre-commit run --all-files"
+######################################
+# DOCS
+######################################
+
+[tool.hatch.envs.docs]
+dependencies = [
+ "aenum",
+ "pydantic>=1.10.0",
+ "pydata-sphinx-theme",
+ "sphinx",
+ "sphinx-autoapi",
+ "sphinx-autobuild",
+]
+
+[tool.hatch.envs.docs.scripts]
+build = "sphinx-build -b html docs docs/_build"
+serve = "sphinx-autobuild docs docs/_build"
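+# Run with `hatch run docs:build` (one-off HTML build) or `hatch run docs:serve` (live-reload preview)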
+
######################################
# THIRD PARTY TOOLS
######################################