Skip to content

Commit

Permalink
Dynamic bucket name for Tekton test
Browse files Browse the repository at this point in the history
  • Loading branch information
diegolovison committed Oct 25, 2023
1 parent a07b91b commit 3cad149
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 5 deletions.
14 changes: 13 additions & 1 deletion ods_ci/libs/DataSciencePipelinesKfpTekton.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,16 @@ def get_secret(self, api, project, name):
)
return json.loads(secret_json)

def get_bucket_name(self, api, project):
    """Return the object-storage bucket name used by the DSPA in *project*.

    Fetches the ``pipelines-definition`` DataSciencePipelinesApplication
    resource as JSON via ``oc`` and inspects ``spec.objectStorage``:
    an in-cluster ``minio`` section takes precedence, otherwise the
    ``externalStorage`` bucket is returned.

    Args:
        api: object exposing ``run_oc(cmd)`` -> ``(stdout, stderr)``.
        project: namespace (project) holding the DSPA resource.

    Returns:
        str: the configured bucket name.
    """
    # NOTE: stdout holds the full DSPA JSON document, not just the bucket.
    dspa_json, _ = api.run_oc(
        f"oc get dspa -n {project} pipelines-definition -o json"
    )
    object_storage = json.loads(dspa_json)["spec"]["objectStorage"]
    # A DSPA configures exactly one backend: in-cluster minio wins,
    # otherwise fall through to the external storage definition.
    if "minio" in object_storage:
        return object_storage["minio"]["bucket"]
    return object_storage["externalStorage"]["bucket"]

def import_souce_code(self, path):
module_name = os.path.basename(path).replace("-", "_")
spec = importlib.util.spec_from_loader(
Expand All @@ -83,6 +93,7 @@ def kfp_tekton_create_run_from_pipeline_func(
):
client, api = self.get_client(user, pwd, project, route_name)
mlpipeline_minio_artifact_secret = self.get_secret(api, project, 'mlpipeline-minio-artifact')
bucket_name = self.get_bucket_name(api, project)
# the current path is from where you are running the script
# sh ods_ci/run_robot_test.sh
# the current_path will be ods-ci
Expand All @@ -97,7 +108,8 @@ def kfp_tekton_create_run_from_pipeline_func(
# if you need to see the yaml, for debugging purpose, call: TektonCompiler().compile(pipeline, f'{fn}.yaml')
result = client.create_run_from_pipeline_func(
pipeline_func=pipeline, arguments={
'mlpipeline_minio_artifact_secret': mlpipeline_minio_artifact_secret["data"]
'mlpipeline_minio_artifact_secret': mlpipeline_minio_artifact_secret["data"],
'bucket_name': bucket_name
}
)
# easy to debug and double check failures
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def receive_file(
shutil.copyfile(incomingfile, saveartifact)


def test_uploaded_artifact(previous_step: kfp.components.InputPath(), file_size_bytes: int, mlpipeline_minio_artifact_secret: str):
def test_uploaded_artifact(previous_step: kfp.components.InputPath(), file_size_bytes: int, mlpipeline_minio_artifact_secret: str, bucket_name: str):
from minio import Minio
import base64
import json
Expand All @@ -76,7 +76,7 @@ def inner_decode(my_str):
secure=secure
)

data = client.get_object('mlpipeline', object_name)
data = client.get_object(bucket_name, object_name)
with open('my-testfile', 'wb') as file_data:
for d in data.stream(32 * 1024):
file_data.write(d)
Expand Down Expand Up @@ -113,7 +113,7 @@ def inner_decode(my_str):
@kfp.dsl.pipeline(
name="Test Data Passing Pipeline 1",
)
def wire_up_pipeline(mlpipeline_minio_artifact_secret):
def wire_up_pipeline(mlpipeline_minio_artifact_secret, bucket_name):
import json

file_size_mb = 20
Expand All @@ -125,6 +125,6 @@ def wire_up_pipeline(mlpipeline_minio_artifact_secret):
send_file_task.output,
).add_pod_annotation(name='artifact_outputs', value=json.dumps(['saveartifact']))

test_uploaded_artifact_op(receive_file_task.output, file_size_bytes, mlpipeline_minio_artifact_secret)
test_uploaded_artifact_op(receive_file_task.output, file_size_bytes, mlpipeline_minio_artifact_secret, bucket_name)


0 comments on commit 3cad149

Please sign in to comment.