Enhance kfp client keywords and add basic DSP acceptance test suite (#1828)

* Enhance kfp client bindings. Add convenience keywords in PipelinesBackend

Signed-off-by: Jorge Garcia Oncins <[email protected]>

* Sort DSP test suites

Signed-off-by: Jorge Garcia Oncins <[email protected]>

* Fix bug when deleting pipeline runs

Signed-off-by: Jorge Garcia Oncins <[email protected]>

* Add dsp acceptance test suite

Signed-off-by: Jorge Garcia Oncins <[email protected]>

* Reformat files with ruff to fix linter errors

Signed-off-by: Jorge Garcia Oncins <[email protected]>

* Fix some robocop warnings

Signed-off-by: Jorge Garcia Oncins <[email protected]>

---------

Signed-off-by: Jorge Garcia Oncins <[email protected]>
jgarciao authored Sep 19, 2024
1 parent f4738d3 commit c2822a7
Showing 12 changed files with 534 additions and 47 deletions.
4 changes: 2 additions & 2 deletions ods_ci/libs/DataSciencePipelinesAPI.py
@@ -11,10 +11,10 @@

class DataSciencePipelinesAPI:
# init should not call an external system, otherwise dry-run will fail
def __init__(self):
def __init__(self, sleep_time: int = 45):
self.route = ""
self.sa_token = None
self.sleep_time = 45
self.sleep_time = sleep_time

@keyword
def login_and_wait_dsp_route(
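The comment above reflects a Robot Framework constraint: library classes are instantiated even during --dryrun, so the constructor must stay free of side effects. A minimal sketch of the resulting deferred-initialization pattern, assuming robotlibcore's keyword decorator; the helper names are illustrative, not the library's actual API:

from robotlibcore import keyword


class LazyBackendClient:
    def __init__(self, sleep_time: int = 45):
        # No network calls here: Robot Framework instantiates libraries
        # during --dryrun, so __init__ must remain side-effect free.
        self.sleep_time = sleep_time
        self._token = None

    @keyword
    def login(self, username: str, password: str) -> str:
        # The first call to the external system happens lazily, on keyword use.
        if self._token is None:
            self._token = self._fetch_token(username, password)
        return self._token

    def _fetch_token(self, username: str, password: str) -> str:
        # Placeholder for the real login flow (e.g. oc login + oc whoami -t).
        raise NotImplementedError
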
359 changes: 352 additions & 7 deletions ods_ci/libs/DataSciencePipelinesKfp.py

Large diffs are not rendered by default.
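
Because that diff is hidden, here is a hedged sketch of what kfp 2.x SDK bindings of this shape can look like. The names, signatures, and defaults below are illustrative of the SDK, not the actual contents of DataSciencePipelinesKfp.py:

import kfp


def setup_client(host: str, token: str) -> kfp.Client:
    # DSP routes are typically reached through an OpenShift route,
    # authenticating with a bearer token obtained for the test user.
    return kfp.Client(host=host, existing_token=token)


def upload_pipeline(client: kfp.Client, pipeline_package_path: str,
                    pipeline_name: str, description: str, namespace: str):
    pipeline = client.upload_pipeline(
        pipeline_package_path=pipeline_package_path,
        pipeline_name=pipeline_name,
        description=description,
        namespace=namespace,
    )
    # A freshly uploaded pipeline has a single version; return both ids so
    # callers can start runs against that exact version.
    versions = client.list_pipeline_versions(pipeline.pipeline_id)
    return pipeline.pipeline_id, versions.pipeline_versions[0].pipeline_version_id


def create_experiment(client: kfp.Client, name: str, description: str,
                      namespace: str) -> str:
    experiment = client.create_experiment(name, description=description,
                                          namespace=namespace)
    return experiment.experiment_id


def run_pipeline(client: kfp.Client, experiment_id: str, job_name: str,
                 pipeline_id: str, version_id: str, params: dict) -> str:
    run = client.run_pipeline(
        experiment_id=experiment_id,
        job_name=job_name,
        pipeline_id=pipeline_id,
        version_id=version_id,
        params=params,
    )
    return run.run_id


def wait_for_run_completion(client: kfp.Client, run_id: str,
                            timeout: int = 180, sleep_duration: int = 5) -> str:
    # Blocks until the run reaches a terminal state or the timeout expires.
    run = client.wait_for_run_completion(run_id, timeout,
                                         sleep_duration=sleep_duration)
    return run.state  # e.g. "SUCCEEDED" or "FAILED" in the v2beta1 API
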

ods_ci/tests/Resources/CLI/DataSciencePipelines/DataSciencePipelinesBackend.resource
@@ -2,9 +2,10 @@
Documentation Collection of keywords to interact with Data Science Pipelines via CLI
Library OperatingSystem
Library String
Library ../../../../libs/DataSciencePipelinesAPI.py
Library ../../../../libs/DataSciencePipelinesKfp.py
Resource ../../../Resources/OCP.resource


*** Variables ***
${DSPA_PATH}= tests/Resources/Files/pipeline-samples/v2/dspa

@@ -134,3 +135,77 @@ Create Secret With Pipelines Object Storage Information
[Arguments] ${namespace} ${object_storage_access_key} ${object_storage_secret_key}
Run oc create secret generic dashboard-dspa-secret -n ${namespace} --from-literal=AWS_ACCESS_KEY_ID=${object_storage_access_key} --from-literal=AWS_SECRET_ACCESS_KEY=${object_storage_secret_key}
Run oc label secret dashboard-dspa-secret -n ${namespace} opendatahub.io/dashboard=true


Import Pipeline And Create Run
[Documentation] Imports a pipeline, creates an experiment and starts a pipeline run
[Arguments] ${namespace} ${username} ${password}
... ${pipeline_name} ${pipeline_description} ${pipeline_package_path}
... ${pipeline_run_name} ${pipeline_run_params}
... ${experiment_name}=Default ${experiment_description}=${EMPTY}

DataSciencePipelinesKfp.Setup Client user=${username} pwd=${password} project=${namespace}

${pipeline_id} ${pipeline_version_id}= DataSciencePipelinesKfp.Upload Pipeline
... pipeline_package_path=${pipeline_package_path}
... pipeline_name=${pipeline_name}
... description=${pipeline_description}
... namespace=${namespace}

${experiment_id}= DataSciencePipelinesKfp.Create Experiment name=${experiment_name}
... description=${experiment_description} namespace=${namespace}

${pipeline_run_id}= DataSciencePipelinesKfp.Run Pipeline
... experiment_id=${experiment_id}
... job_name=${pipeline_run_name}
... pipeline_id=${pipeline_id}
... version_id=${pipeline_version_id}
... params=${pipeline_run_params}

RETURN ${pipeline_id} ${pipeline_version_id} ${pipeline_run_id} ${experiment_id}

Wait For Run Completion And Verify Status
[Documentation] Waits for a pipeline run to finish and verifies that its final status matches the expected one
[Arguments] ${namespace} ${username} ${password}
... ${pipeline_run_id} ${pipeline_run_timeout}=180
... ${pipeline_run_expected_status}=SUCCEEDED

DataSciencePipelinesKfp.Setup Client user=${username} pwd=${password} project=${namespace}

${pipeline_run_status}= DataSciencePipelinesKfp.Wait For Run Completion run_id=${pipeline_run_id}
... timeout=${pipeline_run_timeout} sleep_duration=${5}

IF "${pipeline_run_status}" != "${pipeline_run_expected_status}"
${error_msg}= Catenate Expected pipeline status was ${pipeline_run_expected_status} but pipeline run
... finished with status=${pipeline_run_status}
Fail ${error_msg}
END

RETURN ${pipeline_run_status}

Delete Pipeline And Related Resources
[Documentation] Deletes a pipeline and, depending on the parameters, all its versions and runs
... If ${delete_versions}=${TRUE}, deletes all versions
... If ${delete_runs}=${TRUE}, deletes pipeline runs in ${experiment_id}
... If ${experiment_id} is not provided, the Default experiment will be used
[Tags] robot:recursive-continue-on-failure
[Arguments] ${namespace} ${username} ${password}
... ${pipeline_id} ${experiment_id}=${EMPTY}
... ${delete_versions}=${TRUE} ${delete_runs}=${TRUE}

DataSciencePipelinesKfp.Setup Client user=${username} pwd=${password} project=${namespace}

IF ${delete_runs}==True
DataSciencePipelinesKfp.Delete All Runs For Pipeline
... namespace=${namespace} pipeline_id=${pipeline_id} experiment_id=${experiment_id}
END

# TODO: delete recurring jobs

IF ${delete_versions}==True
DataSciencePipelinesKfp.Delete All Pipeline Versions ${pipeline_id}
END

DataSciencePipelinesKfp.Delete Pipeline ${pipeline_id}
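
One of the commit bullets above fixes a bug when deleting pipeline runs; run listing in the KFP v2beta1 API is paginated, so a delete-all helper needs to collect matching run ids across pages before deleting anything. A hedged Python sketch of such a helper — the real Delete All Runs For Pipeline implementation lives in DataSciencePipelinesKfp.py and is not rendered here, and the pipeline_version_reference field is assumed from the v2beta1 run model:

def delete_all_runs_for_pipeline(client, pipeline_id, experiment_id=None):
    # Collect matching run ids first, so deletion does not disturb paging.
    run_ids, page_token = [], ""
    while True:
        response = client.list_runs(experiment_id=experiment_id,
                                    page_token=page_token)
        for run in response.runs or []:
            # v2beta1 runs carry a reference to the pipeline that created them.
            ref = run.pipeline_version_reference
            if ref is not None and ref.pipeline_id == pipeline_id:
                run_ids.append(run.run_id)
        page_token = response.next_page_token
        if not page_token:
            break
    for run_id in run_ids:
        client.delete_run(run_id)
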


ods_ci/tests/Resources/Files/pipeline-samples/v2/pip_index_url/hello_world_pip_index_url.py
@@ -13,31 +13,44 @@
modify the yaml file to use PIP_TRUSTED_HOST.
"""

from kfp import compiler, dsl
from kfp import kubernetes

common_base_image = "registry.redhat.io/ubi8/python-39@sha256:3523b184212e1f2243e76d8094ab52b01ea3015471471290d011625e1763af61"
common_base_image = (
"registry.redhat.io/ubi8/python-39@sha256:3523b184212e1f2243e76d8094ab52b01ea3015471471290d011625e1763af61"
)


@dsl.component(base_image=common_base_image, pip_index_urls=['$PIP_INDEX_URL'], pip_trusted_hosts=['$PIP_TRUSTED_HOST'])
@dsl.component(
base_image=common_base_image,
packages_to_install=["pyfiglet==1.0.2"],
pip_index_urls=["$PIP_INDEX_URL"],
pip_trusted_hosts=["$PIP_TRUSTED_HOST"],
)
def print_message(message: str):
import os
from pyfiglet import Figlet

"""Prints a message"""
print("------------------------------------------------------------------")
print(message)
print('pip_index_url:' + os.environ['PIP_INDEX_URL'])
print('pip_trusted_host:' + os.environ['PIP_TRUSTED_HOST'])
print("pip_index_url:" + os.environ["PIP_INDEX_URL"])
print("pip_trusted_host:" + os.environ["PIP_TRUSTED_HOST"])
print("------------------------------------------------------------------")

f = Figlet(font="slant")
print(f.renderText(message))


@dsl.pipeline(name="hello-world-pipeline", description="Pipeline that prints a hello message")
def hello_world_pipeline(message: str = "Hello world"):
print_message_task = print_message(message=message)
print_message_task.set_caching_options(False)
kubernetes.use_config_map_as_env(
print_message_task,
config_map_name='ds-pipeline-custom-env-vars',
config_map_key_to_env={'pip_index_url': 'PIP_INDEX_URL', 'pip_trusted_host': 'PIP_TRUSTED_HOST'}
config_map_name="ds-pipeline-custom-env-vars",
config_map_key_to_env={"pip_index_url": "PIP_INDEX_URL", "pip_trusted_host": "PIP_TRUSTED_HOST"},
)


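The file above is truncated before its entry point; producing the compiled YAML that the acceptance suite later imports typically comes down to a single call, sketched here using the compiler import already shown at the top of the sample (the output file name mirrors ${PIPELINE_HELLOWORLD_FILEPATH} used below):

# Sketch of the compile step for this sample; the actual invocation sits in
# the truncated part of the file above.
if __name__ == "__main__":
    compiler.Compiler().compile(
        pipeline_func=hello_world_pipeline,
        package_path="hello_world_pip_index_url_compiled.yaml",
    )
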
ods_ci/tests/Resources/Files/pipeline-samples/v2/pip_index_url/hello_world_pip_index_url_compiled.yaml
@@ -26,7 +26,9 @@ deploymentSpec:
\ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
\ python3 -m pip install --quiet --no-warn-script-location --index-url $PIP_INDEX_URL\
\ --trusted-host $PIP_TRUSTED_HOST 'kfp==2.9.0' '--no-deps' 'typing-extensions>=3.7.4,<5;\
\ python_version<\"3.9\"' && \"$0\" \"$@\"\n"
\ python_version<\"3.9\"' && python3 -m pip install --quiet --no-warn-script-location\
\ --index-url $PIP_INDEX_URL --trusted-host $PIP_TRUSTED_HOST 'pyfiglet==1.0.2'\
\ && \"$0\" \"$@\"\n"
- sh
- -ec
- 'program_path=$(mktemp -d)
@@ -38,11 +40,12 @@
'
- "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\
\ *\n\ndef print_message(message: str):\n import os\n \"\"\"Prints\
\ a message\"\"\"\n print(\"------------------------------------------------------------------\"\
)\n print(message)\n print('pip_index_url:' + os.environ['PIP_INDEX_URL'])\n\
\ print('pip_trusted_host:' + os.environ['PIP_TRUSTED_HOST'])\n print(\"\
------------------------------------------------------------------\")\n\n"
\ *\n\ndef print_message(message: str):\n import os\n from pyfiglet\
\ import Figlet\n\n \"\"\"Prints a message\"\"\"\n print(\"------------------------------------------------------------------\"\
)\n print(message)\n print(\"pip_index_url:\" + os.environ[\"PIP_INDEX_URL\"\
])\n print(\"pip_trusted_host:\" + os.environ[\"PIP_TRUSTED_HOST\"])\n\
\ print(\"------------------------------------------------------------------\"\
)\n\n f = Figlet(font=\"slant\")\n print(f.renderText(message))\n\n"
image: registry.redhat.io/ubi8/python-39@sha256:3523b184212e1f2243e76d8094ab52b01ea3015471471290d011625e1763af61
pipelineInfo:
description: Pipeline that prints a hello message
1 change: 1 addition & 0 deletions ods_ci/tests/Resources/RHOSi.resource
@@ -42,6 +42,7 @@ Resource OCP.resource
... Generate Minio Random Credentials
... Get Minio Credentials
... Create Inference Access Token
... Create Secret With Pipelines Object Storage Information
${ODH_RELEASE_NAME}= Open Data Hub
${RHOAI_SELFMANAGED_RELEASE_NAME}= OpenShift AI Self-Managed
${RHOAI_MANAGED_RELEASE_NAME}= OpenShift AI Cloud Service
@@ -0,0 +1,63 @@
*** Settings ***
Documentation Basic acceptance test suite for Data Science Pipelines
Resource ../../Resources/RHOSi.resource
Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Projects.resource
Resource ../../Resources/CLI/DataSciencePipelines/DataSciencePipelinesBackend.resource
Test Tags DataSciencePipelines-Backend
Suite Setup Dsp Acceptance Suite Setup
Suite Teardown Dsp Acceptance Suite Teardown


*** Variables ***
${PROJECT}= dsp-acceptance
${PIPELINE_HELLOWORLD_FILEPATH}= tests/Resources/Files/pipeline-samples/v2/pip_index_url/hello_world_pip_index_url_compiled.yaml # robocop: disable:line-too-long


*** Test Cases ***
Verify Pipeline Server Creation With S3 Object Storage
[Documentation] Creates a pipeline server using S3 object storage and verifies that all components are running
[Tags] Smoke
Pass Execution Passing test, as suite setup creates pipeline server

Verify Hello World Pipeline Runs Successfully # robocop: disable:too-long-test-case
[Documentation] Runs a quick hello-world pipeline and verifies that it finishes successfully
[Tags] Smoke
${pipeline_run_params}= Create Dictionary message=Hello world!

${pipeline_id} ${pipeline_version_id} ${pipeline_run_id} ${experiment_id}=
... DataSciencePipelinesBackend.Import Pipeline And Create Run
... namespace=${PROJECT} username=${TEST_USER.USERNAME} password=${TEST_USER.PASSWORD}
... pipeline_name=hello-world
... pipeline_description=A hello world pipeline
... pipeline_package_path=${PIPELINE_HELLOWORLD_FILEPATH}
... pipeline_run_name=hello-world-run
... pipeline_run_params=${pipeline_run_params}

DataSciencePipelinesBackend.Wait For Run Completion And Verify Status
... namespace=${PROJECT} username=${TEST_USER.USERNAME} password=${TEST_USER.PASSWORD}
... pipeline_run_id=${pipeline_run_id} pipeline_run_timeout=180
... pipeline_run_expected_status=SUCCEEDED

[Teardown] DataSciencePipelinesBackend.Delete Pipeline And Related Resources
... namespace=${PROJECT} username=${TEST_USER.USERNAME} password=${TEST_USER.PASSWORD}
... pipeline_id=${pipeline_id}


*** Keywords ***
Dsp Acceptance Suite Setup
[Documentation] Creates the test project and deploys a pipeline server using S3 object storage
RHOSi Setup
Projects.Create Data Science Project From CLI ${PROJECT}
DataSciencePipelinesBackend.Create Pipeline Server namespace=${PROJECT}
... object_storage_access_key=${S3.AWS_ACCESS_KEY_ID}
... object_storage_secret_key=${S3.AWS_SECRET_ACCESS_KEY}
... object_storage_endpoint=${S3.BUCKET_2.ENDPOINT}
... object_storage_region=${S3.BUCKET_2.REGION}
... object_storage_bucket_name=${S3.BUCKET_2.NAME}
... dsp_version=v2
DataSciencePipelinesBackend.Wait Until Pipeline Server Is Deployed namespace=${PROJECT}

Dsp Acceptance Suite Teardown
[Documentation] Deletes the test project and the pipeline server it contains
Projects.Delete Project Via CLI By Display Name ${PROJECT}
@@ -16,24 +16,10 @@ Suite Teardown RHOSi Teardown


*** Variables ***
${URL_TEST_PIPELINE_RUN_YAML}= https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines-operator/main/tests/resources/test-pipeline-run.yaml
${URL_TEST_PIPELINE_RUN_YAML}= https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines-operator/main/tests/resources/test-pipeline-run.yaml # robocop: disable:line-too-long


*** Test Cases ***
Verify Pipeline Server Creation With S3 Object Storage
[Documentation] Creates a pipeline server using S3 object storage and verifies that all components are running
[Tags] Smoke
Projects.Create Data Science Project From CLI name=dsp-s3
DataSciencePipelinesBackend.Create Pipeline Server namespace=dsp-s3
... object_storage_access_key=${S3.AWS_ACCESS_KEY_ID}
... object_storage_secret_key=${S3.AWS_SECRET_ACCESS_KEY}
... object_storage_endpoint=${S3.BUCKET_2.ENDPOINT}
... object_storage_region=${S3.BUCKET_2.REGION}
... object_storage_bucket_name=${S3.BUCKET_2.NAME}
... dsp_version=v2
DataSciencePipelinesBackend.Wait Until Pipeline Server Is Deployed namespace=dsp-s3
[Teardown] Projects.Delete Project Via CLI By Display Name dsp-s3

Verify Admin Users Can Create And Run a Data Science Pipeline Using The Api
[Documentation] Creates and runs a pipeline with an admin user, double-checks the pipeline result and cleans
... up the pipeline resources.
@@ -51,10 +37,10 @@ Verify Ods Users Can Do Http Request That Must Be Redirected to Https
[Tags] Tier1 ODS-2234
Projects.Create Data Science Project From CLI name=project-redirect-http
DataSciencePipelinesBackend.Create PipelineServer Using Custom DSPA project-redirect-http
${status} Login And Wait Dsp Route ${OCP_ADMIN_USER.USERNAME} ${OCP_ADMIN_USER.PASSWORD}
${status} = Login And Wait Dsp Route ${OCP_ADMIN_USER.USERNAME} ${OCP_ADMIN_USER.PASSWORD}
... project-redirect-http
Should Be True ${status} == 200 Could not login to the Data Science Pipelines Rest API OR DSP routing is not working # robocop: disable:line-too-long
${url} Do Http Request apis/v2beta1/runs
${url} = Do Http Request apis/v2beta1/runs
Should Start With ${url} https
[Teardown] Projects.Delete Project Via CLI By Display Name project-redirect-http

@@ -76,14 +62,15 @@ Verify DSPO Operator Reconciliation Retry
# Add the missing secret with storage credentials. The DSPO will reconcile and start the pipeline server pods
# Note: as the credentials are dummy, the DSPA status won't be ready, but it's ok because in this test
# we are just testing the DSPO reconciliation
${rc} ${out} = Run And Return Rc And Output oc apply -f ${DSPA_PATH}/dummy-storage-creds.yaml -n ${local_project_name}
${rc} ${out} = Run And Return Rc And Output oc apply -f ${DSPA_PATH}/dummy-storage-creds.yaml -n ${local_project_name} # robocop: disable:line-too-long
IF ${rc}!=0 Fail

# After reconciliation, the project should have at least one pod running
Wait For Pods Number 1 namespace=${local_project_name} timeout=60

[Teardown] Projects.Delete Project Via CLI By Display Name ${local_project_name}


*** Keywords ***
End To End Pipeline Workflow Via Api
[Documentation] Create, run and double-check the pipeline result using the API.
@@ -92,13 +79,13 @@ End To End Pipeline Workflow Via Api
Projects.Delete Project Via CLI By Display Name ${project}
Projects.Create Data Science Project From CLI name=${project}
Create PipelineServer Using Custom DSPA ${project}
${status} Login And Wait Dsp Route ${username} ${password} ${project}
${status} = Login And Wait Dsp Route ${username} ${password} ${project}
Should Be True ${status} == 200 Could not login to the Data Science Pipelines Rest API OR DSP routing is not working # robocop: disable:line-too-long
Setup Client ${username} ${password} ${project}
${pipeline_param}= Create Dictionary recipient=integration_test
${run_id} Import Run Pipeline pipeline_url=${URL_TEST_PIPELINE_RUN_YAML} pipeline_params=${pipeline_param}
${run_status} Check Run Status ${run_id}
Should Be Equal As Strings ${run_status} SUCCEEDED Pipeline run doesn't have a status that means success. Check the logs
${pipeline_param} = Create Dictionary recipient=integration_test
${run_id} = Import Run Pipeline From Url pipeline_url=${URL_TEST_PIPELINE_RUN_YAML} pipeline_params=${pipeline_param} # robocop: disable:line-too-long
${run_status} = Check Run Status ${run_id}
Should Be Equal As Strings ${run_status} SUCCEEDED Pipeline run doesn't have a status that means success. Check the logs # robocop: disable:line-too-long
DataSciencePipelinesKfp.Delete Run ${run_id}
[Teardown] Projects.Delete Project Via CLI By Display Name ${project}

@@ -113,12 +100,12 @@ Verify DSPO Logs Show Error Encountered When Parsing DSPA
${stopped} = Set Variable ${False}
# limit is 180 because the reconciliation runs every 2 minutes
${timeout} = Set Variable 180
${pod_name} = Run oc get pods -n ${APPLICATIONS_NAMESPACE} | grep data-science-pipelines-operator | awk '{print $1}'
${pod_name} = Run oc get pods -n ${APPLICATIONS_NAMESPACE} | grep data-science-pipelines-operator | awk '{print $1}' # robocop: disable:line-too-long
Log ${pod_name}
TRY
WHILE not ${stopped} limit=${timeout}
Sleep 1s
${logs} Run oc logs --tail=1000000 ${pod_name} -n ${APPLICATIONS_NAMESPACE}
${logs} = Run oc logs --tail=1000000 ${pod_name} -n ${APPLICATIONS_NAMESPACE}
${stopped} = Set Variable If "Encountered error when parsing CR" in """${logs}""" True False
END
EXCEPT WHILE loop was aborted type=start
Expand Down
