Skip to content

Commit

Permalink
Merge branch 'master' of github.com:flyteorg/flytesnacks into feature/pathlib-replace-ospath
Browse files Browse the repository at this point in the history
  • Loading branch information
pingsutw authored and MortalHappiness committed May 16, 2024
2 parents 2d920af + ffcfb44 commit 2146424
Show file tree
Hide file tree
Showing 21 changed files with 292 additions and 35 deletions.
33 changes: 22 additions & 11 deletions .github/workflows/checks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ jobs:
# TODO: Register and update the examples below. (onnx_plugin, feast_integration, etc)
echo "PACKAGES=$(find examples -maxdepth 1 -type d -exec basename '{}' \; \
| grep -v -e 'testing' -e 'examples' \
| grep -v -e 'airflow_plugin' -e 'forecasting_sales' -e 'onnx_plugin' -e 'feast_integration' -e 'modin_plugin' -e 'sagemaker_inference_agent' \
| grep -v -e 'airflow_plugin' -e 'forecasting_sales' -e 'onnx_plugin' -e 'feast_integration' -e 'modin_plugin' -e 'sagemaker_inference_agent' -e 'mnist_classifier' \
| sort \
| jq --raw-input . \
| jq --slurp . \
Expand Down Expand Up @@ -90,26 +90,37 @@ jobs:
pip install uv
uv venv
source .venv/bin/activate
uv pip install flytekit flytekitplugins-envd
if [ -f requirements.in ]; then uv pip install -r requirements.in; fi
uv pip install "flytekit>=1.12.1b0" flytekitplugins-envd
pip freeze
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Container Registry
if: ${{ github.event_name != 'pull_request' }}
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ secrets.FLYTE_BOT_USERNAME }}
password: ${{ secrets.FLYTE_BOT_PAT }}
- name: Build and push default image
- name: Check if dockerfile exists
working-directory: examples/${{ matrix.example }}
id: dockerfile
run: |
if [ -f Dockerfile ]; then
tag1=ghcr.io/flyteorg/flytecookbook:${{ matrix.example }}-${{ github.sha }}
tag2=ghcr.io/flyteorg/flytecookbook:latest
docker build -t "$tag1" -t "$tag2" .
if ${{ github.event_name != 'pull_request' }}; then
docker push ghcr.io/flyteorg/flytecookbook --all-tags
fi
if [ -f Dockerfile ]
then
echo "exist=true" >> "$GITHUB_OUTPUT"
else
echo "exist=false" >> "$GITHUB_OUTPUT"
fi
- name: Build and push default image
if: ${{ steps.dockerfile.outputs.exist == 'true' }}
uses: docker/build-push-action@v5
with:
context: examples/${{ matrix.example }}
push: ${{ github.event_name != 'pull_request' }}
tags: ghcr.io/flyteorg/flytecookbook:${{ matrix.example }}-${{ github.sha }},ghcr.io/flyteorg/flytecookbook:latest
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Pyflyte package
working-directory: examples/${{ matrix.example }}
run: |
Expand Down Expand Up @@ -287,7 +298,7 @@ jobs:
pip install uv
uv venv
source .venv/bin/activate
uv pip install --upgrade pip flytekit flytekitplugins-deck-standard torch
uv pip install "flytekit>=1.12.1b0" flytekitplugins-deck-standard torch tabulate
pip freeze
- name: Checkout flytesnacks
uses: actions/checkout@v3
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/monodocs_build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -51,4 +51,4 @@ jobs:
FLYTESNACKS_LOCAL_PATH: ${{ github.workspace }}/flytesnacks
run: |
conda activate monodocs-env
make -C docs html SPHINXOPTS="-W -vvv"
make -C docs html SPHINXOPTS="-W"
2 changes: 1 addition & 1 deletion examples/basics/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ ENV VENV /opt/venv
RUN python3 -m venv ${VENV}
ENV PATH="${VENV}/bin:$PATH"

RUN pip install flytekit==1.10.0 flytekitplugins-envd
RUN pip install flytekit==1.10.0 flytekitplugins-envd "fsspec<2024.5.0"

# Copy the actual code
COPY . /root
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
StructuredDatasetEncoder,
StructuredDatasetTransformerEngine,
)
from tabulate import tabulate
from typing_extensions import Annotated


Expand Down Expand Up @@ -203,6 +202,8 @@ class CompanyField:

@task(container_image=image)
def create_parquet_file() -> StructuredDataset:
from tabulate import tabulate

df = pd.json_normalize(data, max_level=0)
print("original dataframe: \n", tabulate(df, headers="keys", tablefmt="psql"))

Expand All @@ -211,48 +212,62 @@ def create_parquet_file() -> StructuredDataset:

@task(container_image=image)
def print_table_by_arg(sd: MyArgDataset) -> pd.DataFrame:
from tabulate import tabulate

t = sd.open(pd.DataFrame).all()
print("MyArgDataset dataframe: \n", tabulate(t, headers="keys", tablefmt="psql"))
return t


@task(container_image=image)
def print_table_by_dict(sd: MyDictDataset) -> pd.DataFrame:
from tabulate import tabulate

t = sd.open(pd.DataFrame).all()
print("MyDictDataset dataframe: \n", tabulate(t, headers="keys", tablefmt="psql"))
return t


@task(container_image=image)
def print_table_by_list_dict(sd: MyDictListDataset) -> pd.DataFrame:
from tabulate import tabulate

t = sd.open(pd.DataFrame).all()
print("MyDictListDataset dataframe: \n", tabulate(t, headers="keys", tablefmt="psql"))
return t


@task(container_image=image)
def print_table_by_top_dataclass(sd: MyTopDataClassDataset) -> pd.DataFrame:
from tabulate import tabulate

t = sd.open(pd.DataFrame).all()
print("MyTopDataClassDataset dataframe: \n", tabulate(t, headers="keys", tablefmt="psql"))
return t


@task(container_image=image)
def print_table_by_top_dict(sd: MyTopDictDataset) -> pd.DataFrame:
from tabulate import tabulate

t = sd.open(pd.DataFrame).all()
print("MyTopDictDataset dataframe: \n", tabulate(t, headers="keys", tablefmt="psql"))
return t


@task(container_image=image)
def print_table_by_second_dataclass(sd: MySecondDataClassDataset) -> pd.DataFrame:
from tabulate import tabulate

t = sd.open(pd.DataFrame).all()
print("MySecondDataClassDataset dataframe: \n", tabulate(t, headers="keys", tablefmt="psql"))
return t


@task(container_image=image)
def print_table_by_nested_dataclass(sd: MyNestedDataClassDataset) -> pd.DataFrame:
from tabulate import tabulate

t = sd.open(pd.DataFrame).all()
print("MyNestedDataClassDataset dataframe: \n", tabulate(t, headers="keys", tablefmt="psql"))
return t
Expand Down
2 changes: 1 addition & 1 deletion examples/databricks_agent/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM databricksruntime/standard:12.2-LTS
FROM databricksruntime/standard:14.3-LTS
LABEL org.opencontainers.image.source=https://github.com/flyteorg/flytesnacks

ENV VENV /opt/venv
Expand Down
2 changes: 1 addition & 1 deletion examples/databricks_plugin/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM databricksruntime/standard:12.2-LTS
FROM databricksruntime/standard:14.3-LTS
LABEL org.opencontainers.image.source=https://github.com/flyteorg/flytesnacks

ENV VENV /opt/venv
Expand Down
4 changes: 2 additions & 2 deletions examples/development_lifecycle/development_lifecycle/decks.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import flytekit
from flytekit import ImageSpec, task
from flytekitplugins.deck.renderer import MarkdownRenderer
from flytekit.deck.renderer import MarkdownRenderer
from sklearn.decomposition import PCA

# Create a new deck named `pca` and render Markdown content along with a
Expand Down Expand Up @@ -127,7 +127,7 @@ def table_renderer() -> None:
# and renders it as a Unicode string on the deck.
import inspect

from flytekitplugins.deck.renderer import SourceCodeRenderer
from flytekit.deck.renderer import SourceCodeRenderer


@task(enable_deck=True)
Expand Down
1 change: 1 addition & 0 deletions examples/development_lifecycle/requirements.in
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,4 @@ flytekitplugins-deck-standard
flytekitplugins-envd
plotly
scikit-learn
tabulate
2 changes: 1 addition & 1 deletion examples/k8s_dask_plugin/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# ######################
# NOTE: For CI/CD only #
########################
FROM ubuntu:focal
FROM ubuntu:latest
LABEL org.opencontainers.image.source https://github.com/flyteorg/flytesnacks

WORKDIR /root
Expand Down
2 changes: 1 addition & 1 deletion examples/k8s_dask_plugin/k8s_dask_plugin/dask_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
# %% [markdown]
# Create an `ImageSpec` to encompass all the dependencies needed for the Dask task.
# %%
custom_image = ImageSpec(name="flyte-dask-plugin", registry="ghcr.io/flyteorg", packages=["flytekitplugins-dask"])
custom_image = ImageSpec(registry="ghcr.io/flyteorg", packages=["flytekitplugins-dask"])

# %% [markdown]
# :::{important}
Expand Down
2 changes: 1 addition & 1 deletion examples/k8s_spark_plugin/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# ######################
# NOTE: For CI/CD only #
########################
FROM apache/spark-py:3.3.1
FROM apache/spark-py:v3.4.0
LABEL org.opencontainers.image.source https://github.com/flyteorg/flytesnacks

WORKDIR /root
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,7 @@
from flytekit import ImageSpec, Resources, kwtypes, task, workflow
from flytekit.types.structured.structured_dataset import StructuredDataset
from flytekitplugins.spark import Spark

try:
from typing import Annotated
except ImportError:
from typing_extensions import Annotated
from typing_extensions import Annotated

# %% [markdown]
# Create an `ImageSpec` to automate the retrieval of a prebuilt Spark image.
Expand Down
14 changes: 11 additions & 3 deletions examples/kfpytorch_plugin/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,21 @@ To enable the plugin in the backend, follow instructions outlined in the {std:re

## Run the example on the Flyte cluster

To run the provided example on the Flyte cluster, use the following command:
To run the provided examples on the Flyte cluster, use the following commands:

Distributed PyTorch training:

```
pyflyte run --remote pytorch_mnist.py pytorch_training_wf
```

PyTorch Lightning training:

```
pyflyte run --remote pytorch_mnist.py \
pytorch_training_wf
pyflyte run --remote pytorch_lightning_mnist_autoencoder.py train_workflow
```

```{auto-examples-toc}
pytorch_mnist
pytorch_lightning_mnist_autoencoder
```
Loading

0 comments on commit 2146424

Please sign in to comment.