diff --git a/ods_ci/libs/DataSciencePipelinesAPI.py b/ods_ci/libs/DataSciencePipelinesAPI.py
index 58fca3316..a1c75fe72 100644
--- a/ods_ci/libs/DataSciencePipelinesAPI.py
+++ b/ods_ci/libs/DataSciencePipelinesAPI.py
@@ -14,45 +14,7 @@ class DataSciencePipelinesAPI:
     def __init__(self):
         self.route = ""
         self.sa_token = None
-
-    @keyword
-    def wait_until_openshift_pipelines_operator_is_deployed(self):
-        """
-        when creating at the first time, it can take like 1 minute to have the pods ready
-        """
-        deployment_count = 0
-        count = 0
-        while deployment_count != 1 and count < 30:
-            deployments = []
-            response, _ = self.run_oc("oc get deployment -n openshift-operators openshift-pipelines-operator -o json")
-            try:
-                response = json.loads(response)
-                if (
-                    response["metadata"]["name"] == "openshift-pipelines-operator"
-                    and "readyReplicas" in response["status"]
-                    and response["status"]["readyReplicas"] == 1
-                ):
-                    deployments.append(response)
-            except JSONDecodeError:
-                pass
-            deployment_count = len(deployments)
-            time.sleep(1)
-            count += 1
-        pipeline_run_crd_count = 0
-        count = 0
-        while pipeline_run_crd_count < 1 and count < 60:
-            # https://github.com/opendatahub-io/odh-dashboard/issues/1673
-            # It is possible to start the Pipeline Server without pipelineruns.tekton.dev CRD
-            pipeline_run_crd_count = self.count_pods("oc get crd pipelineruns.tekton.dev", 1)
-            time.sleep(1)
-            count += 1
-        assert pipeline_run_crd_count == 1
-        return self.count_running_pods(
-            "oc get pods -n openshift-operators -l name=openshift-pipelines-operator -o json",
-            "openshift-pipelines-operator",
-            "Running",
-            1,
-        )
+        self.sleep_time = 45
 
     @keyword
     def login_and_wait_dsp_route(
@@ -60,7 +22,7 @@ def login_and_wait_dsp_route(
         user,
         pwd,
         project,
-        route_name="ds-pipeline-pipelines-definition",
+        route_name="ds-pipeline-dspa",
         timeout=120,
     ):
         print("Fetch token")
@@ -89,7 +51,7 @@ def login_and_wait_dsp_route(
         assert self.route != "", "Route must not be empty"
 
         print(f"Waiting for Data Science Pipeline route to be ready to avoid firing false alerts: {self.route}")
-        time.sleep(45)
+        time.sleep(self.sleep_time)
         status = -1
         count = 0
         while status != 200 and count < timeout:
@@ -102,8 +64,8 @@ def login_and_wait_dsp_route(
             # if you need to debug, try to print also the response
             print(f"({count}): Data Science Pipeline HTTP Status: {status}")
             if status != 200:
-                time.sleep(30)
-                count += 30
+                time.sleep(self.sleep_time)
+                count += self.sleep_time
         return status
 
     @keyword
@@ -121,112 +83,6 @@ def remove_pipeline_project(self, project):
             time.sleep(1)
             count += 1
 
-    @keyword
-    def create_pipeline(self, url_test_pipeline_run_yaml):
-        print("Creating a pipeline from data science pipelines stack")
-        test_pipeline_run_yaml, _ = self.do_get(url_test_pipeline_run_yaml)
-        filename = "test_pipeline_run_yaml.yaml"
-        with open(filename, "w", encoding="utf-8") as f:
-            f.write(test_pipeline_run_yaml)
-        with open(filename, "rb") as f:
-            response, _ = self.do_upload(
-                f"https://{self.route}/apis/v1beta1/pipelines/upload",
-                files={"uploadfile": f},
-                headers={"Authorization": f"Bearer {self.sa_token}"},
-            )
-        os.remove(filename)
-        pipeline_json = json.loads(response)
-        pipeline_id = pipeline_json["id"]
-        response, status = self.do_get(
-            f"https://{self.route}/apis/v1beta1/pipelines/{pipeline_id}",
-            headers={"Authorization": f"Bearer {self.sa_token}"},
-        )
-        assert status == 200
-        assert json.loads(response)["name"] == filename
-        return pipeline_id
-
-    @keyword
-    def create_run(self, pipeline_id):
-        print("Creating the run from uploaded pipeline")
-        response, status = self.do_post(
-            f"https://{self.route}/apis/v1beta1/runs",
-            headers={
-                "Authorization": f"Bearer {self.sa_token}",
-                "Content-Type": "application/json",
-            },
-            json={
-                "name": "test-pipeline-run",
-                "pipeline_spec": {"pipeline_id": f"{pipeline_id}"},
-            },
-        )
-        assert status == 200
-        run_json = json.loads(response)
-        run_id = run_json["run"]["id"]
-
-        response, status = self.do_get(
-            f"https://{self.route}/apis/v1beta1/runs/{run_id}",
-            headers={"Authorization": f"Bearer {self.sa_token}"},
-        )
-        assert status == 200
-
-        return run_id
-
-    @keyword
-    def check_run_status(self, run_id, timeout=160):
-        run_status = None
-        count = 0
-        run_finished_ok = False
-        while not run_finished_ok and count < timeout:
-            response, status = self.do_get(
-                f"https://{self.route}/apis/v1beta1/runs/{run_id}",
-                headers={"Authorization": f"Bearer {self.sa_token}"},
-            )
-            try:
-                run_json = json.loads(response)
-                if "run" in run_json and "status" in run_json["run"]:
-                    run_status = run_json["run"]["status"]
-            except JSONDecodeError:
-                print(response, status)
-            print(f"Checking run status: {run_status}")
-            if run_status == "Failed":
-                break
-            # https://github.com/tektoncd/pipeline/blob/main/docs/pipelineruns.md#monitoring-execution-status
-            if run_status in ("Completed", "Succeeded"):
-                run_finished_ok = True
-                break
-            time.sleep(1)
-            count += 1
-        return run_finished_ok
-
-    @keyword
-    def delete_runs(self, run_id):
-        print("Deleting the runs")
-
-        response, status = self.do_delete(
-            f"https://{self.route}/apis/v1beta1/runs/{run_id}",
-            headers={"Authorization": f"Bearer {self.sa_token}"},
-        )
-        assert status == 200
-        response, status = self.do_get(
-            f"https://{self.route}/apis/v1beta1/runs/{run_id}",
-            headers={"Authorization": f"Bearer {self.sa_token}"},
-        )
-        assert status == 404
-
-    @keyword
-    def delete_pipeline(self, pipeline_id):
-        print("Deleting the pipeline")
-        response, status = self.do_delete(
-            f"https://{self.route}/apis/v1beta1/pipelines/{pipeline_id}",
-            headers={"Authorization": f"Bearer {self.sa_token}"},
-        )
-        assert status == 200
-        response, status = self.do_get(
-            f"https://{self.route}/apis/v1beta1/pipelines/{pipeline_id}",
-            headers={"Authorization": f"Bearer {self.sa_token}"},
-        )
-        assert status == 404
-
     @keyword
     def add_role_to_user(self, name, user, project):
         output, error = self.run_oc(f"oc policy add-role-to-user {name} {user} -n {project} --role-namespace={project}")
@@ -309,8 +165,11 @@ def run_oc(self, command):
         output, error = process.communicate()
         return self.byte_to_str(output), error
 
-    def do_get(self, url, headers=None):
-        response = requests.get(url, headers=headers, verify=self.get_cert())
+    def do_get(self, url, headers=None, skip_ssl=False):
+        if skip_ssl:
+            response = requests.get(url, headers=headers, verify=False)
+        else:
+            response = requests.get(url, headers=headers, verify=self.get_cert())
         return self.byte_to_str(response.content), response.status_code
 
     def do_post(self, url, headers, json):
@@ -330,6 +189,7 @@ def byte_to_str(self, content):
 
     def get_secret(self, project, name):
         secret_json, _ = self.run_oc(f"oc get secret -n {project} {name} -o json")
+        assert len(secret_json) > 0
         return json.loads(secret_json)
 
     def get_cert(self):
@@ -337,7 +197,7 @@ def get_cert(self):
         cert = cert_json["data"]["tls.crt"]
         decoded_cert = base64.b64decode(cert).decode("utf-8")
 
-        file_name = "/tmp/kft-cert"
+        file_name = "/tmp/kfp-cert"
         cert_file = open(file_name, "w")
         cert_file.write(decoded_cert)
         cert_file.close()
diff --git a/ods_ci/libs/DataSciencePipelinesKfpTekton.py b/ods_ci/libs/DataSciencePipelinesKfp.py
similarity index 51%
rename from ods_ci/libs/DataSciencePipelinesKfpTekton.py
rename to ods_ci/libs/DataSciencePipelinesKfp.py
index 6a0846619..c0fefa704 100644
--- a/ods_ci/libs/DataSciencePipelinesKfpTekton.py
+++ b/ods_ci/libs/DataSciencePipelinesKfp.py
@@ -1,15 +1,14 @@
-import base64
 import importlib
 import json
 import os
 import sys
-
+import time
 from DataSciencePipelinesAPI import DataSciencePipelinesAPI
 from robotlibcore import keyword
 from urllib3.exceptions import MaxRetryError, SSLError
 
 
-class DataSciencePipelinesKfpTekton:
+class DataSciencePipelinesKfp:
     base_image = (
         "registry.redhat.io/ubi8/python-39@sha256:3523b184212e1f2243e76d8094ab52b01ea3015471471290d011625e1763af61"
     )
@@ -19,43 +18,37 @@ def __init__(self):
         self.client = None
         self.api = None
 
-    def get_client(self, user, pwd, project, route_name):
+    def get_client(self, user, pwd, project, route_name='ds-pipeline-dspa'):
         if self.client is None:
             self.api = DataSciencePipelinesAPI()
             self.api.login_and_wait_dsp_route(user, pwd, project, route_name)
 
             # initialize global environment variables
             # https://github.com/kubeflow/kfp-tekton/issues/1345
-            default_image = DataSciencePipelinesKfpTekton.base_image
+            default_image = DataSciencePipelinesKfp.base_image
             os.environ["DEFAULT_STORAGE_CLASS"] = self.api.get_default_storage()
             os.environ["TEKTON_BASH_STEP_IMAGE"] = default_image
             os.environ["TEKTON_COPY_RESULTS_STEP_IMAGE"] = default_image
             os.environ["CONDITION_IMAGE_NAME"] = default_image
             # https://kubernetes.io/docs/concepts/storage/persistent-volumes/#access-modes
            os.environ["DEFAULT_ACCESSMODES"] = "ReadWriteOnce"
-            import kfp_tekton
+            from kfp.client import Client
 
            # the following fallback it is to simplify the test development
             try:
                 # we assume it is a secured cluster
                 # ssl_ca_cert came from /path/to/python/lib/python3.x/site-packages/certifi/cacert.pem
                 # that certificate is "Mozilla's carefully curated collection of root certificates"
-                self.client = kfp_tekton.TektonClient(
-                    host=f"https://{self.api.route}/", existing_token=self.api.sa_token
-                )
+                self.client = Client(host=f"https://{self.api.route}/", existing_token=self.api.sa_token)
             except MaxRetryError as e:
                 # we assume it is a cluster with self-signed certs
                 if type(e.reason) == SSLError:
                     # try to retrieve the certificate
-                    self.client = kfp_tekton.TektonClient(
-                        host=f"https://{self.api.route}/",
-                        existing_token=self.api.sa_token,
-                        ssl_ca_cert=self.api.get_cert(),
-                    )
+                    self.client = Client(host=f"https://{self.api.route}/", existing_token=self.api.sa_token, ssl_ca_cert=self.api.get_cert())
         return self.client, self.api
 
     def get_bucket_name(self, api, project):
-        bucket_name, _ = api.run_oc(f"oc get dspa -n {project} pipelines-definition -o json")
+        bucket_name, _ = api.run_oc(f"oc get dspa -n {project} dspa -o json")
         objectStorage = json.loads(bucket_name)["spec"]["objectStorage"]
         if "minio" in objectStorage:
             return objectStorage["minio"]["bucket"]
@@ -71,11 +64,56 @@ def import_souce_code(self, path):
         return module
 
     @keyword
-    def kfp_tekton_create_run_from_pipeline_func(
-        self, user, pwd, project, route_name, source_code, fn, current_path=None
+    def setup_client(self, user, pwd, project):
+        # force a new client
+        self.client = None
+        self.get_client(user, pwd, project)
+
+    @keyword
+    def import_run_pipeline(self, pipeline_url, pipeline_params):
+        print(f'pipeline_params({type(pipeline_params)}): {pipeline_params}')
+        print(f'downloading: {pipeline_url}')
+        test_pipeline_run_yaml, _ = self.api.do_get(pipeline_url, skip_ssl=True)
+        pipeline_file = "/tmp/test_pipeline_run_yaml.yaml"
+        with open(pipeline_file, "w", encoding="utf-8") as f:
+            f.write(test_pipeline_run_yaml)
+        print(f'{pipeline_url} content stored at {pipeline_file}')
+        print('create a run from pipeline')
+        response = self.client.create_run_from_pipeline_package(
+            pipeline_file=pipeline_file,
+            arguments=pipeline_params
+        )
+        print(response)
+        return response.run_id
+
+    @keyword
+    def check_run_status(self, run_id, timeout=160):
+        count = 0
+        while count < timeout:
+            response = self.client.get_run(run_id)
+            run_status = response.state
+            print(f"Checking run status: {run_status}")
+            if run_status == "FAILED":
+                break
+            if run_status == "SUCCEEDED":
+                break
+            time.sleep(1)
+            count += 1
+        return run_status
+
+    @keyword
+    def delete_run(self, run_id):
+        response = self.client.delete_run(run_id)
+        # means success
+        assert len(response) == 0
+
+    @keyword
+    def create_run_from_pipeline_func(
+        self, user, pwd, project, source_code, fn, pipeline_params={}, current_path=None, route_name='ds-pipeline-dspa'
     ):
+        print(f'pipeline_params: {pipeline_params}')
         client, api = self.get_client(user, pwd, project, route_name)
-        mlpipeline_minio_artifact_secret = api.get_secret(project, "mlpipeline-minio-artifact")
+        mlpipeline_minio_artifact_secret = api.get_secret(project, "ds-pipeline-s3-dspa")
         bucket_name = self.get_bucket_name(api, project)
         # the current path is from where you are running the script
         # sh ods_ci/run_robot_test.sh
@@ -83,28 +121,30 @@ def kfp_tekton_create_run_from_pipeline_func(
         if current_path is None:
             current_path = os.getcwd()
         my_source = self.import_souce_code(
-            f"{current_path}/ods_ci/tests/Resources/Files/pipeline-samples/{source_code}"
+            f"{current_path}/ods_ci/tests/Resources/Files/pipeline-samples/v2/{source_code}"
         )
         pipeline = getattr(my_source, fn)
 
+        # pipeline_params
+        # there are some special keys to retrieve argument values dynamically
+        # in pipeline v2, we must match the parameters names
+        if 'mlpipeline_minio_artifact_secret' in pipeline_params:
+            pipeline_params['mlpipeline_minio_artifact_secret'] = str(mlpipeline_minio_artifact_secret["data"])
+        if 'bucket_name' in pipeline_params:
+            pipeline_params['bucket_name'] = bucket_name
+        if 'openshift_server' in pipeline_params:
+            pipeline_params['openshift_server'] = self.api.get_openshift_server()
+        if 'openshift_token' in pipeline_params:
+            pipeline_params['openshift_token'] = self.api.get_openshift_token()
+        print(f'pipeline_params modified with dynamic values: {pipeline_params}')
+
         # create_run_from_pipeline_func will compile the code
         # if you need to see the yaml, for debugging purpose, call: TektonCompiler().compile(pipeline, f'{fn}.yaml')
         result = client.create_run_from_pipeline_func(
             pipeline_func=pipeline,
-            arguments={
-                "mlpipeline_minio_artifact_secret": mlpipeline_minio_artifact_secret["data"],
-                "bucket_name": bucket_name,
-                "openshift_server": self.api.get_openshift_server(),
-                "openshift_token": self.api.get_openshift_token(),
-            },
+            arguments=pipeline_params
         )
         # easy to debug and double check failures
         print(result)
-        return result
+        return result.run_id
 
-    # we are calling DataSciencePipelinesAPI because of https://github.com/kubeflow/kfp-tekton/issues/1223
-    # Waiting for a backport https://github.com/kubeflow/kfp-tekton/pull/1234
-    @keyword
-    def kfp_tekton_wait_for_run_completion(self, user, pwd, project, route_name, run_result, timeout=160):
-        _, api = self.get_client(user, pwd, project, route_name)
-        return api.check_run_status(run_result.run_id, timeout=timeout)
diff --git a/ods_ci/tests/Resources/Files/data-science-pipelines-reconciliation.yaml b/ods_ci/tests/Resources/Files/data-science-pipelines-reconciliation.yaml
index 6c18db166..563330bfe 100644
--- a/ods_ci/tests/Resources/Files/data-science-pipelines-reconciliation.yaml
+++ b/ods_ci/tests/Resources/Files/data-science-pipelines-reconciliation.yaml
@@ -1,8 +1,9 @@
 apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
 kind: DataSciencePipelinesApplication
 metadata:
-  name: sample
+  name: dspa
 spec:
+  dspVersion: v2
   objectStorage:
     externalStorage:
       bucket: rhods-dsp-dev
diff --git a/ods_ci/tests/Resources/Files/data-science-pipelines-sample.yaml b/ods_ci/tests/Resources/Files/data-science-pipelines-sample.yaml
index a4a875276..24c1bdb02 100644
--- a/ods_ci/tests/Resources/Files/data-science-pipelines-sample.yaml
+++ b/ods_ci/tests/Resources/Files/data-science-pipelines-sample.yaml
@@ -1,12 +1,13 @@
 apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
 kind: DataSciencePipelinesApplication
 metadata:
-  name: pipelines-definition
+  name: dspa
 spec:
   # One of minio or externalStorage must be specified for objectStorage
   # This example illustrates minimal deployment with minio
   # This is NOT supported and should be used for dev testing/experimentation only.
   # See dspa_simple_external_storage.yaml for an example with external connection.
+  dspVersion: v2
   objectStorage:
     minio:
       # Image field is required
diff --git a/ods_ci/tests/Resources/Files/pipeline-samples/flip_coin.py b/ods_ci/tests/Resources/Files/pipeline-samples/v1/flip_coin.py
similarity index 89%
rename from ods_ci/tests/Resources/Files/pipeline-samples/flip_coin.py
rename to ods_ci/tests/Resources/Files/pipeline-samples/v1/flip_coin.py
index e5bff93f7..fe11d3e96 100644
--- a/ods_ci/tests/Resources/Files/pipeline-samples/flip_coin.py
+++ b/ods_ci/tests/Resources/Files/pipeline-samples/v1/flip_coin.py
@@ -15,7 +15,7 @@
 # source https://github.com/kubeflow/kfp-tekton/blob/master/samples/flip-coin/condition.py
 from kfp import components, dsl
 
-from ods_ci.libs.DataSciencePipelinesKfpTekton import DataSciencePipelinesKfpTekton
+from ods_ci.libs.DataSciencePipelinesKfp import DataSciencePipelinesKfp
 
 
 def random_num(low: int, high: int) -> int:
@@ -46,10 +46,10 @@ def print_msg(msg: str):
     description="Shows how to use dsl.Condition().",
 )
 def flipcoin_pipeline():
-    flip_coin_op = components.create_component_from_func(flip_coin, base_image=DataSciencePipelinesKfpTekton.base_image)
-    print_op = components.create_component_from_func(print_msg, base_image=DataSciencePipelinesKfpTekton.base_image)
+    flip_coin_op = components.create_component_from_func(flip_coin, base_image=DataSciencePipelinesKfp.base_image)
+    print_op = components.create_component_from_func(print_msg, base_image=DataSciencePipelinesKfp.base_image)
     random_num_op = components.create_component_from_func(
-        random_num, base_image=DataSciencePipelinesKfpTekton.base_image
+        random_num, base_image=DataSciencePipelinesKfp.base_image
     )
 
     flip = flip_coin_op()
diff --git a/ods_ci/tests/Resources/Files/pipeline-samples/flip_coin_compiled.yaml b/ods_ci/tests/Resources/Files/pipeline-samples/v1/flip_coin_compiled.yaml
similarity index 100%
rename from ods_ci/tests/Resources/Files/pipeline-samples/flip_coin_compiled.yaml
rename to ods_ci/tests/Resources/Files/pipeline-samples/v1/flip_coin_compiled.yaml
diff --git a/ods_ci/tests/Resources/Files/pipeline-samples/iris_pipeline_compiled.yaml
b/ods_ci/tests/Resources/Files/pipeline-samples/v1/iris_pipeline_compiled.yaml similarity index 100% rename from ods_ci/tests/Resources/Files/pipeline-samples/iris_pipeline_compiled.yaml rename to ods_ci/tests/Resources/Files/pipeline-samples/v1/iris_pipeline_compiled.yaml diff --git a/ods_ci/tests/Resources/Files/pipeline-samples/ray_integration.py b/ods_ci/tests/Resources/Files/pipeline-samples/v1/ray_integration.py similarity index 93% rename from ods_ci/tests/Resources/Files/pipeline-samples/ray_integration.py rename to ods_ci/tests/Resources/Files/pipeline-samples/v1/ray_integration.py index eb587cb4d..e7640c76e 100644 --- a/ods_ci/tests/Resources/Files/pipeline-samples/ray_integration.py +++ b/ods_ci/tests/Resources/Files/pipeline-samples/v1/ray_integration.py @@ -1,6 +1,6 @@ from kfp import components, dsl -from ods_ci.libs.DataSciencePipelinesKfpTekton import DataSciencePipelinesKfpTekton +from ods_ci.libs.DataSciencePipelinesKfp import DataSciencePipelinesKfp def ray_fn(openshift_server: str, openshift_token: str) -> int: @@ -69,7 +69,7 @@ def train_fn(): def ray_integration(openshift_server, openshift_token): ray_op = components.create_component_from_func( ray_fn, - base_image=DataSciencePipelinesKfpTekton.base_image, + base_image=DataSciencePipelinesKfp.base_image, packages_to_install=["codeflare-sdk"], ) ray_op(openshift_server, openshift_token) diff --git a/ods_ci/tests/Resources/Files/pipeline-samples/take_nap.py b/ods_ci/tests/Resources/Files/pipeline-samples/v1/take_nap.py similarity index 100% rename from ods_ci/tests/Resources/Files/pipeline-samples/take_nap.py rename to ods_ci/tests/Resources/Files/pipeline-samples/v1/take_nap.py diff --git a/ods_ci/tests/Resources/Files/pipeline-samples/take_nap_compiled.yaml b/ods_ci/tests/Resources/Files/pipeline-samples/v1/take_nap_compiled.yaml similarity index 100% rename from ods_ci/tests/Resources/Files/pipeline-samples/take_nap_compiled.yaml rename to ods_ci/tests/Resources/Files/pipeline-samples/v1/take_nap_compiled.yaml diff --git a/ods_ci/tests/Resources/Files/pipeline-samples/upload_download.py b/ods_ci/tests/Resources/Files/pipeline-samples/v1/upload_download.py similarity index 93% rename from ods_ci/tests/Resources/Files/pipeline-samples/upload_download.py rename to ods_ci/tests/Resources/Files/pipeline-samples/v1/upload_download.py index f77693b3e..8020f970e 100644 --- a/ods_ci/tests/Resources/Files/pipeline-samples/upload_download.py +++ b/ods_ci/tests/Resources/Files/pipeline-samples/v1/upload_download.py @@ -2,7 +2,7 @@ import kfp -from ods_ci.libs.DataSciencePipelinesKfpTekton import DataSciencePipelinesKfpTekton +from ods_ci.libs.DataSciencePipelinesKfp import DataSciencePipelinesKfp """Producer""" @@ -94,18 +94,18 @@ def inner_decode(my_str): """Build the producer component""" send_file_op = kfp.components.create_component_from_func( send_file, - base_image=DataSciencePipelinesKfpTekton.base_image, + base_image=DataSciencePipelinesKfp.base_image, ) """Build the consumer component""" receive_file_op = kfp.components.create_component_from_func( receive_file, - base_image=DataSciencePipelinesKfpTekton.base_image, + base_image=DataSciencePipelinesKfp.base_image, ) test_uploaded_artifact_op = kfp.components.create_component_from_func( test_uploaded_artifact, - base_image=DataSciencePipelinesKfpTekton.base_image, + base_image=DataSciencePipelinesKfp.base_image, packages_to_install=["minio"], ) diff --git a/ods_ci/tests/Resources/Files/pipeline-samples/upload_download_compiled.yaml 
b/ods_ci/tests/Resources/Files/pipeline-samples/v1/upload_download_compiled.yaml similarity index 100% rename from ods_ci/tests/Resources/Files/pipeline-samples/upload_download_compiled.yaml rename to ods_ci/tests/Resources/Files/pipeline-samples/v1/upload_download_compiled.yaml diff --git a/ods_ci/tests/Resources/Files/pipeline-samples/v2/flip_coin.py b/ods_ci/tests/Resources/Files/pipeline-samples/v2/flip_coin.py new file mode 100644 index 000000000..7d11950cc --- /dev/null +++ b/ods_ci/tests/Resources/Files/pipeline-samples/v2/flip_coin.py @@ -0,0 +1,65 @@ +# Copyright 2020 kubeflow.org +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# source https://github.com/kubeflow/kfp-tekton/blob/master/samples/flip-coin/condition.py +from kfp import components, dsl + +from ods_ci.libs.DataSciencePipelinesKfp import DataSciencePipelinesKfp + + +@dsl.component +def random_num(low: int, high: int) -> int: + """Generate a random number between low and high.""" + import random + + result = random.randint(low, high) + print(result) + return result + + +@dsl.component +def flip_coin() -> str: + """Flip a coin and output heads or tails randomly.""" + import random + + result = "heads" if random.randint(0, 1) == 0 else "tails" + print(result) + return result + + +@dsl.component +def print_msg(msg: str): + """Print a message.""" + print(msg) + + +@dsl.pipeline( + name="conditional-execution-pipeline", + description="Shows how to use dsl.Condition().", +) +def flipcoin_pipeline(): + flip = flip_coin() + with dsl.Condition(flip.output == "heads"): + random_num_head = random_num(low=0, high=9) + with dsl.Condition(random_num_head.output > 5): + print_msg(msg="heads and %s > 5!" % random_num_head.output) + with dsl.Condition(random_num_head.output <= 5): + print_msg(msg="heads and %s <= 5!" % random_num_head.output) + + with dsl.Condition(flip.output == "tails"): + random_num_tail = random_num(low=10, high=19) + with dsl.Condition(random_num_tail.output > 15): + print_msg(msg="tails and %s > 15!" % random_num_tail.output) + with dsl.Condition(random_num_tail.output <= 15): + print_msg(msg="tails and %s <= 15!" 
% random_num_tail.output) diff --git a/ods_ci/tests/Resources/Files/pipeline-samples/v2/ray_integration.py b/ods_ci/tests/Resources/Files/pipeline-samples/v2/ray_integration.py new file mode 100644 index 000000000..1f8fde1e7 --- /dev/null +++ b/ods_ci/tests/Resources/Files/pipeline-samples/v2/ray_integration.py @@ -0,0 +1,70 @@ +from kfp import dsl +from ods_ci.libs.DataSciencePipelinesKfp import DataSciencePipelinesKfp + + +@dsl.component(packages_to_install=['codeflare-sdk'], base_image=DataSciencePipelinesKfp.base_image) +def ray_fn(openshift_server: str, openshift_token: str) -> int: + import ray + from codeflare_sdk.cluster.auth import TokenAuthentication + from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration + + print("before login") + auth = TokenAuthentication(token=openshift_token, server=openshift_server, skip_tls=True) + auth_return = auth.login() + print(f'auth_return: "{auth_return}"') + print("after login") + cluster = Cluster( + ClusterConfiguration( + name="raytest", + # namespace must exist, and it is the same from 432__data-science-pipelines-tekton.robot + namespace="pipelineskfp1", + num_workers=1, + head_cpus="500m", + min_memory=1, + max_memory=1, + num_gpus=0, + image="quay.io/project-codeflare/ray:latest-py39-cu118", + instascale=False, + ) + ) + + # always clean the resources + cluster.down() + print(cluster.status()) + cluster.up() + cluster.wait_ready() + print(cluster.status()) + print(cluster.details()) + + ray_dashboard_uri = cluster.cluster_dashboard_uri() + ray_cluster_uri = cluster.cluster_uri() + print(ray_dashboard_uri) + print(ray_cluster_uri) + + # before proceeding make sure the cluster exists and the uri is not empty + assert ray_cluster_uri, "Ray cluster needs to be started and set before proceeding" + + # reset the ray context in case there's already one. 
+ ray.shutdown() + # establish connection to ray cluster + ray.init(address=ray_cluster_uri) + print("Ray cluster is up and running: ", ray.is_initialized()) + + @ray.remote + def train_fn(): + return 100 + + result = ray.get(train_fn.remote()) + assert 100 == result + ray.shutdown() + cluster.down() + auth.logout() + return result + + +@dsl.pipeline( + name="Ray Integration Test", + description="Ray Integration Test", +) +def ray_integration(openshift_server: str, openshift_token: str): + ray_fn(openshift_server=openshift_server, openshift_token=openshift_token) diff --git a/ods_ci/tests/Resources/Files/pipeline-samples/v2/upload_download.py b/ods_ci/tests/Resources/Files/pipeline-samples/v2/upload_download.py new file mode 100644 index 000000000..fdd2dc3d1 --- /dev/null +++ b/ods_ci/tests/Resources/Files/pipeline-samples/v2/upload_download.py @@ -0,0 +1,106 @@ +"""Test pipeline to exercise various data flow mechanisms.""" +import kfp +from ods_ci.libs.DataSciencePipelinesKfp import DataSciencePipelinesKfp + + +@kfp.dsl.component(base_image=DataSciencePipelinesKfp.base_image) +def send_file( + file_size_bytes: int, + outgoingfile: kfp.dsl.OutputPath(), +): + import os + import zipfile + + def create_large_file(file_path, size_in_bytes): + with open(file_path, "wb") as f: + f.write(os.urandom(size_in_bytes)) + + def zip_file(input_file_path, output_zip_path): + with zipfile.ZipFile(output_zip_path, "w", compression=zipfile.ZIP_DEFLATED) as zipf: + zipf.write(input_file_path, os.path.basename(input_file_path)) + + print("starting creating the file...") + file_path = "/tmp/large_file.txt" + create_large_file(file_path, file_size_bytes) + zip_file(file_path, outgoingfile) + print(f"saved: {outgoingfile}") + + +@kfp.dsl.component(base_image=DataSciencePipelinesKfp.base_image) +def receive_file( + incomingfile: kfp.dsl.InputPath(), + saveartifact: kfp.dsl.OutputPath(), +): + import os + import shutil + + print("reading %s, size is %s" % (incomingfile, os.path.getsize(incomingfile))) + + with open(incomingfile, "rb") as f: + b = f.read(1) + print("read byte: %s" % b) + f.close() + + print("copying in %s to out %s" % (incomingfile, saveartifact)) + shutil.copyfile(incomingfile, saveartifact) + + +@kfp.dsl.component(packages_to_install=['minio'], base_image=DataSciencePipelinesKfp.base_image) +def test_uploaded_artifact( + previous_step: kfp.dsl.InputPath(), + file_size_bytes: int, + mlpipeline_minio_artifact_secret: str, + bucket_name: str, +): + import base64 + import json + from minio import Minio + + def inner_decode(my_str): + return base64.b64decode(my_str).decode("utf-8") + + mlpipeline_minio_artifact_secret = json.loads(mlpipeline_minio_artifact_secret.replace("\'", "\"")) + host = inner_decode(mlpipeline_minio_artifact_secret["host"]) + port = inner_decode(mlpipeline_minio_artifact_secret["port"]) + access_key = inner_decode(mlpipeline_minio_artifact_secret["accesskey"]) + secret_key = inner_decode(mlpipeline_minio_artifact_secret["secretkey"]) + secure = inner_decode(mlpipeline_minio_artifact_secret["secure"]) + secure = secure.lower() == "true" + client = Minio(f"{host}:{port}", access_key=access_key, secret_key=secret_key, secure=secure) + + store_object = previous_step.replace(f'/s3/{bucket_name}/', '') + print(f'parsing {previous_step} to {store_object} ') + data = client.get_object(bucket_name, store_object) + + with open("my-testfile", "wb") as file_data: + for d in data.stream(32 * 1024): + file_data.write(d) + bytes_written = file_data.tell() + + print(file_size_bytes, 
bytes_written) + diff = round((bytes_written / file_size_bytes) - 1, 3) + print(diff) + # if not matching, the test will fail + assert diff == 0 + + +@kfp.dsl.pipeline( + name="Test Data Passing Pipeline 1", +) +def wire_up_pipeline(mlpipeline_minio_artifact_secret: str, bucket_name: str): + + file_size_mb = 20 + file_size_bytes = file_size_mb * 1024 * 1024 + + send_file_task = send_file(file_size_bytes=file_size_bytes) + + receive_file_task = receive_file( + incomingfile=send_file_task.output, + ) + + test_uploaded_artifact( + previous_step=receive_file_task.output, + file_size_bytes=file_size_bytes, + mlpipeline_minio_artifact_secret=mlpipeline_minio_artifact_secret, + bucket_name=bucket_name + ) diff --git a/ods_ci/tests/Resources/Files/redhat-openshift-pipelines.yaml b/ods_ci/tests/Resources/Files/redhat-openshift-pipelines.yaml deleted file mode 100644 index 05e87cfbf..000000000 --- a/ods_ci/tests/Resources/Files/redhat-openshift-pipelines.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: operators.coreos.com/v1alpha1 -kind: Subscription -metadata: - name: openshift-pipelines-operator-rh - namespace: openshift-operators -spec: - channel: latest - installPlanApproval: Automatic - name: openshift-pipelines-operator-rh - source: redhat-operators - sourceNamespace: openshift-marketplace diff --git a/ods_ci/tests/Resources/Page/Operators/OpenShiftPipelines.resource b/ods_ci/tests/Resources/Page/Operators/OpenShiftPipelines.resource deleted file mode 100644 index d26acd102..000000000 --- a/ods_ci/tests/Resources/Page/Operators/OpenShiftPipelines.resource +++ /dev/null @@ -1,35 +0,0 @@ -*** Settings *** -Documentation Test suite for OpenShift Pipeline - -Resource ../../RHOSi.resource -Resource ../../ODS.robot -Resource ../../Common.robot -Resource ../../Page/ODH/ODHDashboard/ODHDashboard.robot -Resource ../../Page/OCPDashboard/OCPMenu.robot -Library DateTime -Library ../../libs/DataSciencePipelinesAPI.py - - -*** Variables *** -${REDHAT_OPENSHIFT_PIPELINES_YAML} ods_ci/tests/Resources/Files/redhat-openshift-pipelines.yaml - - -*** Keywords *** -# robocop: disable=too-many-calls-in-keyword,line-too-long -Install Red Hat OpenShift Pipelines - [Documentation] Install the latest Red Hat OpenShift Pipelines Operator by default. A different version - ... can be installed by setting OPENSHIFT_PIPELINES_CHANNEL in test-variables.yml . - ... Example: if the value of OPENSHIFT_PIPELINES_CHANNEL is pipelines-1.12 will install the latest 1.12 version - ... that currently is 1.12.2 - ${return_code}= Run And Return Rc - ... sed -i "s,channel: .*,channel: ${OPENSHIFT_PIPELINES_CHANNEL},g" ${EXECDIR}/${REDHAT_OPENSHIFT_PIPELINES_YAML} - Should Be Equal As Integers ${return_code} 0 - Oc Apply kind=Subscription src=${REDHAT_OPENSHIFT_PIPELINES_YAML} - ${pod_count}= Wait Until OpenShift Pipelines Operator Is Deployed - Should Be True ${pod_count} == 1 msg=Error installing OpenShift Pipelines operator - ${rc} ${output}= Run And Return Rc And Output - ... 
oc get csv -ojson | jq -r '.items[] | select(.metadata.name | test("openshift-pipelines-operator")) | .spec.version' - Should Be Equal As Integers ${rc} 0 - Log A OpenShift Pipelines Operator of version ${output} is installed console=True - - diff --git a/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/430__data-science-pipelines-operator-dependency.robot b/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/430__data-science-pipelines-operator-dependency.robot deleted file mode 100644 index 664d69b28..000000000 --- a/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/430__data-science-pipelines-operator-dependency.robot +++ /dev/null @@ -1,42 +0,0 @@ -*** Settings *** -Documentation Suite to test Data Science Pipeline Operator feature using RHODS UI -Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Projects.resource -Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/DataConnections.resource -Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Pipelines.resource -Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataSciencePipelines.resource -Resource ../../../Resources/Page/Operators/OpenShiftPipelines.resource -Resource ../../../Resources/Page/Operators/ISVs.resource -Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDashboard.robot -Test Tags DataSciencePipelines -Suite Setup Pipelines Suite Setup -Suite Teardown Pipelines Suite Teardown - - -*** Variables *** - - -*** Test Cases *** -Verify Dashboard Disables Pipelines When OpenShift Pipelines Operator Is Not Installed - [Documentation] Dashboard verifies OpenShift Pipelines operator disables Pipelines if not installed - [Tags] Smoke Tier1 - ... ODS-2274 - ${pipelines_op_installed}= Check If Operator Is Installed Via CLI operator_name=openshift-pipelines-operator-rh - Launch Data Science Project Main Page username=${TEST_USER.USERNAME} password=${TEST_USER.PASSWORD} - IF ${pipelines_op_installed} - Verify Pipelines Are Enabled - ELSE - Verify Pipelines Are Disabled - END - -*** Keywords *** -Pipelines Suite Setup - [Documentation] Suite setup steps for testing operator availability. It creates some test variables - ... and runs RHOSi setup - RHOSi Setup - Set Library Search Order SeleniumLibrary - -Pipelines Suite Teardown - [Documentation] Suite setup steps for testing operator availability. 
It teardown the RHOSi setup - RHOSi Teardown - - diff --git a/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/431__data-science-pipelines-api.robot b/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/431__data-science-pipelines-api.robot index b77ff66aa..b805b8e39 100644 --- a/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/431__data-science-pipelines-api.robot +++ b/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/431__data-science-pipelines-api.robot @@ -4,38 +4,34 @@ Resource ../../../Resources/RHOSi.resource Resource ../../../Resources/ODS.robot Resource ../../../Resources/Common.robot Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDashboard.robot -Resource ../../../Resources/Page/Operators/OpenShiftPipelines.resource Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataSciencePipelines.resource Library DateTime Library ../../../../libs/DataSciencePipelinesAPI.py +Library ../../../../libs/DataSciencePipelinesKfp.py Test Tags DataSciencePipelines Suite Setup Data Science Pipelines Suite Setup Suite Teardown RHOSi Teardown *** Variables *** -${URL_TEST_PIPELINE_RUN_YAML}= https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines-operator/73b95d89536c79c4d34606cf8ea1499bd986a4b6/tests/resources/test-pipeline-run.yaml +${URL_TEST_PIPELINE_RUN_YAML}= https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines-operator/main/tests/resources/test-pipeline-run.yaml *** Test Cases *** Verify Ods Users Can Create And Run a Data Science Pipeline Using The Api [Documentation] Creates, runs pipelines with admin and regular user. Double check the pipeline result and clean ... the pipeline resources. - [Tags] Sanity - ... Tier1 - ... ODS-2083 + [Tags] Sanity Tier1 ODS-2083 End To End Pipeline Workflow Via Api ${OCP_ADMIN_USER.USERNAME} ${OCP_ADMIN_USER.PASSWORD} pipelinesapi1 End To End Pipeline Workflow Via Api ${TEST_USER.USERNAME} ${TEST_USER.PASSWORD} pipelinesapi2 Verify Ods Users Can Do Http Request That Must Be Redirected to Https [Documentation] Verify Ods Users Can Do Http Request That Must Be Redirected to Https - [Tags] Sanity - ... Tier1 - ... ODS-2234 + [Tags] Sanity Tier1 ODS-2234 New Project project-redirect-http Install DataSciencePipelinesApplication CR project-redirect-http ${status} Login And Wait Dsp Route ${OCP_ADMIN_USER.USERNAME} ${OCP_ADMIN_USER.PASSWORD} - ... project-redirect-http ds-pipeline-pipelines-definition + ... 
project-redirect-http Should Be True ${status} == 200 Could not login to the Data Science Pipelines Rest API OR DSP routing is not working # robocop: disable:line-too-long ${url} Do Http Request apis/v1beta1/runs Should Start With ${url} https @@ -65,14 +61,14 @@ End To End Pipeline Workflow Via Api Remove Pipeline Project ${project} New Project ${project} Install DataSciencePipelinesApplication CR ${project} - ${status} Login And Wait Dsp Route ${username} ${password} ${project} ds-pipeline-pipelines-definition + ${status} Login And Wait Dsp Route ${username} ${password} ${project} Should Be True ${status} == 200 Could not login to the Data Science Pipelines Rest API OR DSP routing is not working # robocop: disable:line-too-long - ${pipeline_id} Create Pipeline ${URL_TEST_PIPELINE_RUN_YAML} - ${run_id} Create Run ${pipeline_id} + Setup Client ${username} ${password} ${project} + ${pipeline_param}= Create Dictionary recipient=integration_test + ${run_id} Import Run Pipeline pipeline_url=${URL_TEST_PIPELINE_RUN_YAML} pipeline_params=${pipeline_param} ${run_status} Check Run Status ${run_id} - Should Be True ${run_status} Pipeline run doesn't have a status that means success. Check the logs - DataSciencePipelinesAPI.Delete Runs ${run_id} - DataSciencePipelinesAPI.Delete Pipeline ${pipeline_id} + Should Be Equal As Strings ${run_status} SUCCEEDED Pipeline run doesn't have a status that means success. Check the logs + DataSciencePipelinesKfp.Delete Run ${run_id} [Teardown] Remove Pipeline Project ${project} Double Check If DSPA Was Created @@ -104,4 +100,3 @@ Data Science Pipelines Suite Setup [Documentation] Data Science Pipelines Suite Setup Set Library Search Order SeleniumLibrary RHOSi Setup - Install Red Hat OpenShift Pipelines diff --git a/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/432__data-science-pipelines-tekton.robot b/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/432__data-science-pipelines-kfp.robot similarity index 68% rename from ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/432__data-science-pipelines-tekton.robot rename to ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/432__data-science-pipelines-kfp.robot index 714ca28cb..0de8d7f45 100644 --- a/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/432__data-science-pipelines-tekton.robot +++ b/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/432__data-science-pipelines-kfp.robot @@ -1,49 +1,50 @@ *** Settings *** -Documentation Test suite for OpenShift Pipeline using kfp_tekton python package +Documentation Test suite for OpenShift Pipeline using kfp python package Resource ../../../Resources/RHOSi.resource Resource ../../../Resources/ODS.robot Resource ../../../Resources/Common.robot Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDashboard.robot -Resource ../../../Resources/Page/Operators/OpenShiftPipelines.resource Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataSciencePipelines.resource Library DateTime Library ../../../../libs/DataSciencePipelinesAPI.py -Library ../../../../libs/DataSciencePipelinesKfpTekton.py +Library ../../../../libs/DataSciencePipelinesKfp.py Test Tags DataSciencePipelines Suite Setup Data Science Pipelines Suite Setup Suite Teardown RHOSi Teardown *** Variables *** -${PROJECT_NAME}= pipelineskfptekton1 +${PROJECT_NAME}= pipelineskfp1 *** Test Cases *** -Verify Ods Users Can Create And Run A Data Science Pipeline Using The kfp_tekton Python Package +Verify Ods Users Can Create 
And Run A Data Science Pipeline Using The kfp Python Package [Documentation] Creates, runs pipelines with regular user. Double check the pipeline result and clean ... the pipeline resources. - [Tags] Sanity - ... Tier1 - ... ODS-2203 - End To End Pipeline Workflow Using Kfp Tekton + [Tags] Sanity Tier1 ODS-2203 + ${emtpy_dict}= Create Dictionary + End To End Pipeline Workflow Using Kfp ... username=${TEST_USER.USERNAME} ... password=${TEST_USER.PASSWORD} ... project=${PROJECT_NAME} ... python_file=flip_coin.py ... method_name=flipcoin_pipeline ... status_check_timeout=440 - End To End Pipeline Workflow Using Kfp Tekton + ... pipeline_params=${emtpy_dict} + ${upload_download_dict}= Create Dictionary mlpipeline_minio_artifact_secret=value bucket_name=value + End To End Pipeline Workflow Using Kfp ... username=${TEST_USER.USERNAME} ... password=${TEST_USER.PASSWORD} ... project=${PROJECT_NAME} ... python_file=upload_download.py ... method_name=wire_up_pipeline ... status_check_timeout=440 + ... pipeline_params=${upload_download_dict} [Teardown] Remove Pipeline Project ${PROJECT_NAME} -Verify Ods Users Can Create And Run A Data Science Pipeline With Ray Using The kfp_tekton Python Package +Verify Ods Users Can Create And Run A Data Science Pipeline With Ray Using The kfp Python Package [Documentation] Creates, runs pipelines with regular user. Double check the pipeline result and clean ... the pipeline resources. [Tags] Sanity @@ -51,38 +52,37 @@ Verify Ods Users Can Create And Run A Data Science Pipeline With Ray Using The k ... ODS-2541 Skip If Component Is Not Enabled ray Skip If Component Is Not Enabled codeflare - End To End Pipeline Workflow Using Kfp Tekton + End To End Pipeline Workflow Using Kfp ... username=${TEST_USER.USERNAME} ... password=${TEST_USER.PASSWORD} ... project=${PROJECT_NAME} ... python_file=ray_integration.py ... method_name=ray_integration ... status_check_timeout=440 + ... pipeline_params={'openshift_server': 'value', 'openshift_token': 'value'} [Teardown] Remove Pipeline Project ${PROJECT_NAME} *** Keywords *** # robocop: disable:line-too-long -End To End Pipeline Workflow Using Kfp Tekton - [Documentation] Create, run and double check the pipeline result using Kfp_tekton python package. In the end, +End To End Pipeline Workflow Using Kfp + [Documentation] Create, run and double check the pipeline result using Kfp python package. In the end, ... clean the pipeline resources. [Arguments] ${username} ${password} ${project} ${python_file} ${method_name} - ... ${status_check_timeout}=160 + ... ${pipeline_params} ${status_check_timeout}=160 Remove Pipeline Project ${project} New Project ${project} Install DataSciencePipelinesApplication CR ${project} - ${status} Login And Wait Dsp Route ${username} ${password} ${project} ds-pipeline-pipelines-definition + ${status} Login And Wait Dsp Route ${username} ${password} ${project} Should Be True ${status} == 200 Could not login to the Data Science Pipelines Rest API OR DSP routing is not working - ${result} Kfp Tekton Create Run From Pipeline Func ${username} ${password} ${project} - ... ds-pipeline-pipelines-definition ${python_file} ${method_name} - ${run_status} Kfp Tekton Wait For Run Completion ${username} ${password} ${project} - ... ds-pipeline-pipelines-definition ${result} ${status_check_timeout} - Should Be True ${run_status} Pipeline run doesn't have a status that means success. Check the log + ${run_id} Create Run From Pipeline Func ${username} ${password} ${project} + ... 
${python_file} ${method_name} pipeline_params=${pipeline_params} + ${run_status} Check Run Status ${run_id} + Should Be Equal As Strings ${run_status} SUCCEEDED Pipeline run doesn't have a status that means success. Check the logs Remove Pipeline Project ${project} Data Science Pipelines Suite Setup [Documentation] Data Science Pipelines Suite Setup Set Library Search Order SeleniumLibrary RHOSi Setup - Install Red Hat OpenShift Pipelines diff --git a/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/433__data-science-pipelines-general.robot b/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/433__data-science-pipelines-general.robot index e7d10326a..2add9a8a7 100644 --- a/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/433__data-science-pipelines-general.robot +++ b/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/433__data-science-pipelines-general.robot @@ -9,7 +9,6 @@ Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataSciencePipel Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/DataConnections.resource Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Projects.resource Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Pipelines.resource -Resource ../../../Resources/Page/Operators/OpenShiftPipelines.resource Library DateTime Library ../../../../libs/DataSciencePipelinesAPI.py Test Tags DataSciencePipelines @@ -39,12 +38,12 @@ Verify Ods User Can Bind The Route Role ... ${TEST_USER_4.AUTH_TYPE} ${PROJECT_USER4} # due that the projects were created, it is expected a failure in the first request ${status} Login And Wait Dsp Route ${TEST_USER_3.USERNAME} ${TEST_USER_3.PASSWORD} - ... ${PROJECT_USER4} ds-pipeline-pipelines-definition ${1} + ... ${PROJECT_USER4} ${1} Should Be True ${status} == 403 The user must not have permission to access Add Role To User ds-pipeline-user-access-pipelines-definition ${TEST_USER_3.USERNAME} ${PROJECT_USER4} # rbac is async and takes some time ${status} Login And Wait Dsp Route ${TEST_USER_3.USERNAME} ${TEST_USER_3.PASSWORD} ${PROJECT_USER4} - ... ds-pipeline-pipelines-definition ${30} + ... ${30} Should Be True ${status} == 200 Rolling Binding Not Working @@ -77,5 +76,5 @@ Create A Pipeline Server And Wait For Dsp Route ... 
aws_bucket_name=${S3_BUCKET} Reload Page Create Pipeline Server dc_name=${project}-dc project_title=${project} - ${status} Login And Wait Dsp Route ${user} ${password} ${project} ds-pipeline-pipelines-definition + ${status} Login And Wait Dsp Route ${user} ${password} ${project} Should Be True ${status} == 200 Could not login to the Data Science Pipelines Rest API OR DSP routing is not working # robocop: disable:line-too-long diff --git a/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/434__data-science-pipelines-ui.robot b/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/434__data-science-pipelines-ui.robot index b11c76e9e..fad298a0a 100644 --- a/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/434__data-science-pipelines-ui.robot +++ b/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/434__data-science-pipelines-ui.robot @@ -4,7 +4,6 @@ Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProjec Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/DataConnections.resource Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Pipelines.resource Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataSciencePipelines.resource -Resource ../../../Resources/Page/Operators/OpenShiftPipelines.resource Test Tags DataSciencePipelines Suite Setup Pipelines Suite Setup Suite Teardown Pipelines Suite Teardown @@ -118,8 +117,6 @@ Verify Pipeline Metadata Pods Are Not Deployed When Running Pipelines Pipelines Suite Setup # robocop: disable [Documentation] Sets global test variables, create a DS project and a data connection Set Library Search Order SeleniumLibrary - # TODO: Install Pipeline only if it does not already installed - Install Red Hat OpenShift Pipelines ${to_delete}= Create List ${PRJ_TITLE} Set Suite Variable ${PROJECTS_TO_DELETE} ${to_delete} Launch Data Science Project Main Page username=${TEST_USER_3.USERNAME} diff --git a/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/435__data-science-pipelines-elyra.robot b/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/435__data-science-pipelines-elyra.robot index 424998eba..5f84f9709 100644 --- a/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/435__data-science-pipelines-elyra.robot +++ b/ods_ci/tests/Tests/400__ods_dashboard/430__data_science_pipelines/435__data-science-pipelines-elyra.robot @@ -11,7 +11,6 @@ Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/ Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Storages.resource Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/DataConnections.resource Resource ../../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Pipelines.resource -Resource ../../../Resources/Page/Operators/OpenShiftPipelines.resource Library Screenshot Library DebugLibrary Library JupyterLibrary @@ -88,7 +87,6 @@ Elyra Pipelines Suite Setup [Documentation] Suite Setup Set Library Search Order SeleniumLibrary RHOSi Setup - Install Red Hat OpenShift Pipelines Elyra Pipelines SDS Setup [Documentation] Suite Setup, creates DS Project and opens it diff --git a/poetry.lock b/poetry.lock index f92a56bb9..332cfe043 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,15 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
- -[[package]] -name = "absl-py" -version = "1.4.0" -description = "Abseil Python Common Libraries, see https://github.com/abseil/abseil-py." -optional = false -python-versions = ">=3.6" -files = [ - {file = "absl-py-1.4.0.tar.gz", hash = "sha256:d2c244d01048ba476e7c080bd2c6df5e141d211de80223460d5b3b8a2a58433d"}, - {file = "absl_py-1.4.0-py3-none-any.whl", hash = "sha256:0d3fe606adfa4f7db64792dd4c7aee4ee0c38ab75dfd353b7a83ed3e957fcb47"}, -] +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "aenum" @@ -450,17 +439,6 @@ PrettyTable = ">=0.7.2" PyYAML = ">=3.12" stevedore = ">=2.0.1" -[[package]] -name = "cloudpickle" -version = "2.2.1" -description = "Extended pickling support for Python objects" -optional = false -python-versions = ">=3.6" -files = [ - {file = "cloudpickle-2.2.1-py3-none-any.whl", hash = "sha256:61f594d1f4c295fa5cd9014ceb3a1fc4a70b0de1164b94fbc2d854ccba056f9f"}, - {file = "cloudpickle-2.2.1.tar.gz", hash = "sha256:d89684b8de9e34a2a43b3460fbca07d09d6e25ce858df4d5a44240403b6178f5"}, -] - [[package]] name = "cmd2" version = "2.4.3" @@ -605,23 +583,6 @@ files = [ {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - [[package]] name = "distlib" version = "0.3.7" @@ -935,25 +896,6 @@ grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] -[[package]] -name = "google-api-python-client" -version = "1.12.11" -description = "Google API Client Library for Python" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -files = [ - {file = "google-api-python-client-1.12.11.tar.gz", hash = "sha256:1b4bd42a46321e13c0542a9e4d96fa05d73626f07b39f83a73a947d70ca706a9"}, - {file = "google_api_python_client-1.12.11-py2.py3-none-any.whl", hash = "sha256:7e0a1a265c8d3088ee1987778c72683fcb376e32bada8d7767162bd9c503fd9b"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.21.0,<3dev", markers = "python_version >= \"3\""} -google-auth = {version = ">=1.16.0,<3dev", markers = "python_version >= \"3\""} -google-auth-httplib2 = ">=0.0.3" -httplib2 = ">=0.15.0,<1dev" -six = ">=1.13.0,<2dev" -uritemplate = ">=3.0.0,<4dev" - [[package]] name = "google-auth" version = "2.23.0" @@ -978,21 +920,6 @@ pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] requests = ["requests (>=2.20.0,<3.0.0.dev0)"] -[[package]] -name = "google-auth-httplib2" -version = "0.1.1" -description = "Google Authentication Library: httplib2 transport" -optional = false -python-versions = "*" -files = [ - {file = "google-auth-httplib2-0.1.1.tar.gz", hash = "sha256:c64bc555fdc6dd788ea62ecf7bccffcf497bf77244887a3f3d7a5a02f8e3fc29"}, - {file = 
"google_auth_httplib2-0.1.1-py2.py3-none-any.whl", hash = "sha256:42c50900b8e4dcdf8222364d1f0efe32b8421fb6ed72f2613f12f75cc933478c"}, -] - -[package.dependencies] -google-auth = "*" -httplib2 = ">=0.19.0" - [[package]] name = "google-cloud-core" version = "2.3.3" @@ -1219,20 +1146,6 @@ files = [ beautifulsoup4 = ">=4.7.0" python-docx = ">=0.8.10" -[[package]] -name = "httplib2" -version = "0.22.0" -description = "A comprehensive HTTP client library." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, - {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, -] - -[package.dependencies] -pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} - [[package]] name = "hubspot-api-client" version = "4.0.6" @@ -1534,63 +1447,54 @@ test = ["PyYAML (>=3.12)", "bandit (>=1.1.0,<1.6.0)", "betamax (>=0.7.0)", "cove [[package]] name = "kfp" -version = "1.8.22" -description = "KubeFlow Pipelines SDK" +version = "2.7.0" +description = "Kubeflow Pipelines SDK" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7.0,<3.13.0" files = [ - {file = "kfp-1.8.22.tar.gz", hash = "sha256:3d300cb0f6d5bb303c1197f4d2740f2f27ab1fa6fd6aaa6dd8e72cfa85a72989"}, + {file = "kfp-2.7.0.tar.gz", hash = "sha256:8a2065527ec3d50617bd374c2b25cffeab16d93b34e4be08c1ca3e4bd8d2cc0c"}, ] [package.dependencies] -absl-py = ">=0.9,<2" -click = ">=7.1.2,<9" -cloudpickle = ">=2.0.0,<3" -Deprecated = ">=1.2.7,<2" +click = ">=8.0.0,<9" docstring-parser = ">=0.7.3,<1" -fire = ">=0.3.1,<1" google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-api-python-client = ">=1.7.8,<2" google-auth = ">=1.6.1,<3" -google-cloud-storage = ">=1.20.0,<3" -jsonschema = ">=3.0.1,<5" -kfp-pipeline-spec = ">=0.1.16,<0.2.0" -kfp-server-api = ">=1.1.2,<2.0.0" -kubernetes = ">=8.0.0,<26" -protobuf = ">=3.13.0,<4" -pydantic = ">=1.8.2,<2" +google-cloud-storage = ">=2.2.1,<3" +kfp-pipeline-spec = "0.3.0" +kfp-server-api = ">=2.0.0,<2.1.0" +kubernetes = ">=8.0.0,<27" +protobuf = ">=4.21.1,<5" PyYAML = ">=5.3,<7" requests-toolbelt = ">=0.8.0,<1" -strip-hints = ">=0.1.8,<1" tabulate = ">=0.8.6,<1" -typer = ">=0.3.2,<1.0" -uritemplate = ">=3.0.1,<4" -urllib3 = "<2" +urllib3 = "<2.0.0" [package.extras] -all = ["docker"] +all = ["docker", "kfp-kubernetes (<2)"] +kubernetes = ["kfp-kubernetes (<2)"] [[package]] name = "kfp-pipeline-spec" -version = "0.1.16" +version = "0.3.0" description = "Kubeflow Pipelines pipeline spec" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.7.0,<3.13.0" files = [ - {file = "kfp_pipeline_spec-0.1.16-py3-none-any.whl", hash = "sha256:4cefae00ac50145cf862127202a8b8a783ed7504c773d7d7c517bd115283be25"}, + {file = "kfp_pipeline_spec-0.3.0-py3-none-any.whl", hash = "sha256:1db84524a0a2d6c9d36e7e87e6fa0e181bf1ba1513d29dcd54f7b8822e7a52a2"}, ] [package.dependencies] -protobuf = ">=3.13.0,<4" +protobuf = ">=4.21.1,<5" [[package]] name = "kfp-server-api" -version = "1.8.5" +version = "2.0.5" description = "Kubeflow Pipelines API" optional = false python-versions = "*" files = [ - {file = "kfp-server-api-1.8.5.tar.gz", hash = "sha256:482d71765ba57c003164dbb980a8cb1a18d234b578d064dc88dbeb3e4c7ab6de"}, + {file = "kfp-server-api-2.0.5.tar.gz", hash = 
"sha256:c9cfbf0e87271d3bfe96e5ecc9ffbdd6ab566bc1c9a9ddc2a39d7698a16e26ff"}, ] [package.dependencies] @@ -1599,20 +1503,6 @@ python-dateutil = "*" six = ">=1.10" urllib3 = ">=1.15" -[[package]] -name = "kfp-tekton" -version = "1.5.9" -description = "Tekton Compiler for Kubeflow Pipelines" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "kfp-tekton-1.5.9.tar.gz", hash = "sha256:62e745dba6a4c56e50b8ef23e7eeeac4d85893dbacf398a53355672b29643722"}, -] - -[package.dependencies] -kfp = ">=1.8.10,<1.8.23" -PyYAML = ">=6,<7" - [[package]] name = "korean-lunar-calendar" version = "0.3.1" @@ -2657,33 +2547,22 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "3.20.3" -description = "Protocol Buffers" +version = "4.25.3" +description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"}, - {file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"}, - {file = "protobuf-3.20.3-cp310-cp310-win32.whl", hash = "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c"}, - {file = "protobuf-3.20.3-cp310-cp310-win_amd64.whl", hash = "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7"}, - {file = "protobuf-3.20.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469"}, - {file = "protobuf-3.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4"}, - {file = "protobuf-3.20.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4"}, - {file = "protobuf-3.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454"}, - {file = "protobuf-3.20.3-cp37-cp37m-win32.whl", hash = "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905"}, - {file = "protobuf-3.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c"}, - {file = "protobuf-3.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7"}, - {file = "protobuf-3.20.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee"}, - {file = "protobuf-3.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050"}, - {file = "protobuf-3.20.3-cp38-cp38-win32.whl", hash = "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86"}, - {file = "protobuf-3.20.3-cp38-cp38-win_amd64.whl", hash = "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9"}, - {file = "protobuf-3.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b"}, - {file = "protobuf-3.20.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b"}, - {file = "protobuf-3.20.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402"}, - {file = 
"protobuf-3.20.3-cp39-cp39-win32.whl", hash = "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480"}, - {file = "protobuf-3.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7"}, - {file = "protobuf-3.20.3-py2.py3-none-any.whl", hash = "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db"}, - {file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"}, + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, ] [[package]] @@ -2784,58 +2663,6 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] -[[package]] -name = "pydantic" -version = "1.10.12" -description = "Data validation and settings management using python type hints" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, - {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, - {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, - {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, - {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, - {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, - {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, - {file = 
"pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, - {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, - {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, - {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, -] - -[package.dependencies] -typing-extensions = ">=4.2.0" - -[package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] - [[package]] name = "pyflakes" version = "3.1.0" @@ -4448,19 +4275,6 @@ files = [ {file = "stringcase-1.2.0.tar.gz", hash = "sha256:48a06980661908efe8d9d34eab2b6c13aefa2163b3ced26972902e3bdfd87008"}, ] -[[package]] -name = "strip-hints" -version = "0.1.10" -description = "Function and command-line program to strip Python type hints." -optional = false -python-versions = "*" -files = [ - {file = "strip-hints-0.1.10.tar.gz", hash = "sha256:307c2bd147cd35997c8ed2e9a3bdca48ad9c9617e04ea46599095201b4ce998f"}, -] - -[package.dependencies] -wheel = "*" - [[package]] name = "tabulate" version = "0.9.0" @@ -4584,27 +4398,6 @@ six = ">=1.10.0" dev = ["coveralls (>=1.8.2)", "tox (>=2.4.0)"] test = ["mock (>=1.0.1)", "nose (>=1.3.3)", "vcrpy (>=1.10.3)"] -[[package]] -name = "typer" -version = "0.9.0" -description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, - {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, -] - -[package.dependencies] -click = ">=7.1.1,<9.0.0" -typing-extensions = ">=3.7.4.3" - -[package.extras] -all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] -dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] -doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] -test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] - [[package]] name = "typing-extensions" version = "4.8.0" @@ -4659,17 +4452,6 @@ files = [ [package.dependencies] comtypes = ">=1.1.10" -[[package]] -name = "uritemplate" -version = "3.0.1" -description = "URI templates" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "uritemplate-3.0.1-py2.py3-none-any.whl", hash = "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f"}, - {file = "uritemplate-3.0.1.tar.gz", hash = "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae"}, -] - [[package]] name = "urllib3" version = "1.26.16" @@ -4768,20 +4550,6 @@ docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] -[[package]] -name = "wheel" -version = "0.41.2" -description = "A built-package format for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "wheel-0.41.2-py3-none-any.whl", hash = "sha256:75909db2664838d015e3d9139004ee16711748a52c8f336b52882266540215d8"}, - {file = "wheel-0.41.2.tar.gz", hash = "sha256:0c5ac5ff2afb79ac23ab82bab027a0be7b5dbcf2e54dc50efe4bf507de1f7985"}, -] - -[package.extras] -test = ["pytest (>=6.0.0)", "setuptools (>=65)"] - [[package]] name = "wrapt" version = "1.15.0" @@ -4971,4 +4739,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.11.5, <3.12.0" -content-hash = "8e41a6db77242aa6dc58d96f16709ddc0bdfe16dc2f315a53c10a66d2d0ec3d3" +content-hash = "5eacf3e6921ce7fe5436554884ce97c498dd2a51206c9fdebc648b1420b5e194" diff --git a/pyproject.toml b/pyproject.toml index 1b00f1ee3..8dbea8c98 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ rpaframework = "^27.2.0" pexpect = "^4.8.0" python-openstackclient = "^6.2.0" awscli = "^1.27.100" -kfp-tekton = "==1.5.9" +kfp = "==2.7.0" pyyaml = "^6.0.1" [tool.poetry.group.dev.dependencies]
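
The pyproject.toml hunk above replaces the Tekton-based SDK pin (kfp-tekton 1.5.9, which held kfp at 1.8.x) with the plain KFP SDK v2, kfp 2.7.0; the regenerated lock file accordingly drops the v1-only transitive dependencies (absl-py, cloudpickle, Deprecated, google-api-python-client, google-auth-httplib2, httplib2, pydantic 1.x, strip-hints, typer, uritemplate, wheel) and moves protobuf to 4.25.3, kfp-pipeline-spec to 0.3.0, and kfp-server-api to 2.0.5. For reference, a minimal sketch of the KFP SDK v2 surface that this pin targets is shown below; the component, pipeline, and output-file names are illustrative only and are not introduced anywhere in this change.

    from kfp import compiler, dsl

    @dsl.component(base_image="python:3.11")
    def add(a: int, b: int) -> int:
        # Lightweight Python component; KFP v2 requires type annotations here.
        return a + b

    @dsl.pipeline(name="hello-kfp-v2")
    def hello_pipeline(x: int = 1, y: int = 2):
        # Wire pipeline parameters into the component task.
        add(a=x, b=y)

    if __name__ == "__main__":
        # The v2 compiler emits an IR YAML package, rather than the Tekton
        # PipelineRun YAML produced by the old kfp-tekton compiler.
        compiler.Compiler().compile(
            pipeline_func=hello_pipeline,
            package_path="hello_pipeline.yaml",
        )

The resulting hello_pipeline.yaml is what a KFP v2 API server (kfp-server-api 2.0.x) accepts on upload; any test code still compiling or uploading Tekton-format YAML will need to move to this format alongside the dependency bump.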