Ray and Pipelines integration test
diegolovison committed Dec 12, 2023
1 parent b7f8664 commit e25ce49
Showing 4 changed files with 118 additions and 5 deletions.
6 changes: 6 additions & 0 deletions ods_ci/libs/DataSciencePipelinesAPI.py
@@ -310,6 +310,12 @@ def get_default_storage(self):
                break
        return storage_class['metadata']['name']

    def get_openshift_server(self):
        return self.run_oc('oc whoami --show-server=true')[0].replace('\n', '')

    def get_openshift_token(self):
        return self.run_oc('oc whoami --show-token=true')[0].replace('\n', '')

    def run_oc(self, command):
        process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
        output, error = process.communicate()
4 changes: 3 additions & 1 deletion ods_ci/libs/DataSciencePipelinesKfpTekton.py
@@ -111,7 +111,9 @@ def kfp_tekton_create_run_from_pipeline_func(
        result = client.create_run_from_pipeline_func(
            pipeline_func=pipeline, arguments={
                'mlpipeline_minio_artifact_secret': mlpipeline_minio_artifact_secret["data"],
-                'bucket_name': bucket_name
+                'bucket_name': bucket_name,
+                'openshift_server': self.api.get_openshift_server(),
+                'openshift_token': self.api.get_openshift_token()
            }
        )
        # easy to debug and double check failures
88 changes: 88 additions & 0 deletions ods_ci/tests/Resources/Files/pipeline-samples/ray_integration.py
@@ -0,0 +1,88 @@
from kfp import components, dsl
from ods_ci.libs.DataSciencePipelinesKfpTekton import DataSciencePipelinesKfpTekton


def ray_fn(openshift_server: str, openshift_token: str) -> int:
    from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration
    from codeflare_sdk.cluster.auth import TokenAuthentication
    import ray

    print('before login')
    auth = TokenAuthentication(
        token=openshift_token,
        server=openshift_server,
        skip_tls=True
    )
    auth_return = auth.login()
    print(f'auth_return: "{auth_return}"')
    print('after login')
    cluster = Cluster(ClusterConfiguration(
        name='raytest',
        # namespace must exist, and it is the same from 432__data-science-pipelines-tekton.robot
        namespace='pipelineskfptekton1',
        num_workers=1,
        head_cpus='500m',
        min_memory=1,
        max_memory=1,
        num_gpus=0,
        image="quay.io/project-codeflare/ray:latest-py39-cu118",
        instascale=False
    ))
    # workaround for https://github.com/project-codeflare/codeflare-sdk/pull/412
    cluster_file_name = '/opt/app-root/src/.codeflare/appwrapper/raytest.yaml'
    # Read in the file
    with open(cluster_file_name, 'r') as file:
        filedata = file.read()

    # Replace the target string
    filedata = filedata.replace('busybox:1.28', 'quay.io/project-codeflare/busybox:latest')

    # Write the file out again
    with open(cluster_file_name, 'w') as file:
        file.write(filedata)
    # end workaround

    # always clean the resources
    cluster.down()
    print(cluster.status())
    cluster.up()
    cluster.wait_ready()
    print(cluster.status())
    print(cluster.details())

    ray_dashboard_uri = cluster.cluster_dashboard_uri()
    ray_cluster_uri = cluster.cluster_uri()
    print(ray_dashboard_uri)
    print(ray_cluster_uri)

    # before proceeding make sure the cluster exists and the uri is not empty
    assert ray_cluster_uri, "Ray cluster needs to be started and set before proceeding"

    # reset the ray context in case there's already one.
    ray.shutdown()
    # establish connection to ray cluster
    ray.init(address=ray_cluster_uri)
    print("Ray cluster is up and running: ", ray.is_initialized())

    @ray.remote
    def train_fn():
        return 100

    result = ray.get(train_fn.remote())
    assert 100 == result
    ray.shutdown()
    cluster.down()
    auth.logout()
    return result


@dsl.pipeline(
    name="Ray Integration Test",
    description="Ray Integration Test",
)
def ray_integration(openshift_server, openshift_token):
    ray_op = components.create_component_from_func(
        ray_fn, base_image=DataSciencePipelinesKfpTekton.base_image,
        packages_to_install=['codeflare-sdk']
    )
    ray_op(openshift_server, openshift_token)
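For local debugging outside the Robot suite, the sample pipeline can also be compiled to a Tekton manifest before submission. The snippet below is a minimal sketch, not part of this commit; it assumes the kfp-tekton package is installed and that ray_integration.py is importable from the working directory, and the output filename is an arbitrary choice:

# Sketch: assumes ray_integration.py is on the Python path.
from kfp_tekton.compiler import TektonCompiler
from ray_integration import ray_integration

if __name__ == '__main__':
    # Compile the pipeline into a Tekton PipelineRun YAML for inspection;
    # 'ray_integration.yaml' is an arbitrary output path for this sketch.
    TektonCompiler().compile(ray_integration, 'ray_integration.yaml')

In the test run itself, the same pipeline function is instead submitted directly through client.create_run_from_pipeline_func, as shown in the DataSciencePipelinesKfpTekton.py hunk above.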
25 changes: 21 additions & 4 deletions 432__data-science-pipelines-tekton.robot
@@ -20,21 +20,20 @@ ${PROJECT_NAME}=    pipelineskfptekton1


*** Test Cases ***
-Verify Ods Users Can Create And Run A Data Science Pipeline Using The Kfp_tekton Python Package
+Verify Ods Users Can Create And Run A Data Science Pipeline Using The kfp_tekton Python Package
    [Documentation]    Creates, runs pipelines with regular user. Double check the pipeline result and clean
    ...    the pipeline resources.
    [Tags]    Sanity
    ...    Tier1
    ...    ODS-2203
-    End To End Pipeline Workflow Using Kfp_tekton
+    End To End Pipeline Workflow Using Kfp Tekton
    ...    username=${TEST_USER.USERNAME}
    ...    password=${TEST_USER.PASSWORD}
    ...    project=${PROJECT_NAME}
    ...    python_file=flip_coin.py
    ...    method_name=flipcoin_pipeline
    ...    status_check_timeout=440
-    End To End Pipeline Workflow Using Kfp_tekton
+    End To End Pipeline Workflow Using Kfp Tekton
    ...    username=${TEST_USER.USERNAME}
    ...    password=${TEST_USER.PASSWORD}
    ...    project=${PROJECT_NAME}
@@ -44,6 +43,24 @@ Verify Ods Users Can Create And Run A Data Science Pipeline Using The Kfp_tekton
    [Teardown]    Remove Pipeline Project    ${PROJECT_NAME}


Verify Ods Users Can Create And Run A Data Science Pipeline With Ray Using The kfp_tekton Python Package
    [Documentation]    Creates, runs pipelines with regular user. Double check the pipeline result and clean
    ...    the pipeline resources.
    [Tags]    Sanity
    ...    Tier1
    ...    ODS-2541
    Skip If Component Is Not Enabled    ray
    Skip If Component Is Not Enabled    codeflare
    End To End Pipeline Workflow Using Kfp Tekton
    ...    username=${TEST_USER.USERNAME}
    ...    password=${TEST_USER.PASSWORD}
    ...    project=${PROJECT_NAME}
    ...    python_file=ray_integration.py
    ...    method_name=ray_integration
    ...    status_check_timeout=440
    [Teardown]    Remove Pipeline Project    ${PROJECT_NAME}


*** Keywords ***
# robocop: disable:line-too-long
End To End Pipeline Workflow Using Kfp Tekton