Commit fa2be33 (1 parent: 439e2b3)
Showing 25 changed files with 411 additions and 599 deletions.
@@ -14,53 +14,15 @@ class DataSciencePipelinesAPI:
     def __init__(self):
         self.route = ""
         self.sa_token = None
-
-    @keyword
-    def wait_until_openshift_pipelines_operator_is_deployed(self):
-        """
-        when creating at the first time, it can take like 1 minute to have the pods ready
-        """
-        deployment_count = 0
-        count = 0
-        while deployment_count != 1 and count < 30:
-            deployments = []
-            response, _ = self.run_oc("oc get deployment -n openshift-operators openshift-pipelines-operator -o json")
-            try:
-                response = json.loads(response)
-                if (
-                    response["metadata"]["name"] == "openshift-pipelines-operator"
-                    and "readyReplicas" in response["status"]
-                    and response["status"]["readyReplicas"] == 1
-                ):
-                    deployments.append(response)
-            except JSONDecodeError:
-                pass
-            deployment_count = len(deployments)
-            time.sleep(1)
-            count += 1
-        pipeline_run_crd_count = 0
-        count = 0
-        while pipeline_run_crd_count < 1 and count < 60:
-            # https://github.com/opendatahub-io/odh-dashboard/issues/1673
-            # It is possible to start the Pipeline Server without pipelineruns.tekton.dev CRD
-            pipeline_run_crd_count = self.count_pods("oc get crd pipelineruns.tekton.dev", 1)
-            time.sleep(1)
-            count += 1
-        assert pipeline_run_crd_count == 1
-        return self.count_running_pods(
-            "oc get pods -n openshift-operators -l name=openshift-pipelines-operator -o json",
-            "openshift-pipelines-operator",
-            "Running",
-            1,
-        )
+        self.sleep_time = 45

     @keyword
     def login_and_wait_dsp_route(
         self,
         user,
         pwd,
         project,
-        route_name="ds-pipeline-pipelines-definition",
+        route_name="ds-pipeline-dspa",
         timeout=120,
     ):
         print("Fetch token")
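For orientation only: a minimal sketch of how this keyword library might be driven from plain Python, assuming the class and imports from the rest of this file; the module path, user, password and project values below are placeholders, not part of the commit.

    # Hypothetical driver; module path, credentials and project are placeholders.
    from DataSciencePipelinesAPI import DataSciencePipelinesAPI

    api = DataSciencePipelinesAPI()
    status = api.login_and_wait_dsp_route(
        user="htpasswd-user",           # placeholder cluster user
        pwd="change-me",                # placeholder password
        project="dsp-test-project",     # placeholder data science project
        route_name="ds-pipeline-dspa",  # new default route name in this commit
        timeout=120,
    )
    assert status == 200, "Data Science Pipelines route never became ready"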
@@ -89,7 +51,7 @@ def login_and_wait_dsp_route(

         assert self.route != "", "Route must not be empty"
         print(f"Waiting for Data Science Pipeline route to be ready to avoid firing false alerts: {self.route}")
-        time.sleep(45)
+        time.sleep(self.sleep_time)
         status = -1
         count = 0
         while status != 200 and count < timeout:
@@ -102,8 +64,8 @@ def login_and_wait_dsp_route(
             # if you need to debug, try to print also the response
             print(f"({count}): Data Science Pipeline HTTP Status: {status}")
             if status != 200:
-                time.sleep(30)
-                count += 30
+                time.sleep(self.sleep_time)
+                count += self.sleep_time
         return status

     @keyword
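The loop above is the poll-until-ready pattern this library uses throughout, now parameterized by self.sleep_time. A standalone sketch of the same idea, assuming only the requests package; the URL and token are placeholders:

    import time
    import requests

    def wait_for_http_200(url, token, sleep_time=45, timeout=120, verify=False):
        # Poll `url` until it returns HTTP 200 or `timeout` seconds have passed.
        status, waited = -1, 0
        while status != 200 and waited < timeout:
            try:
                status = requests.get(
                    url, headers={"Authorization": f"Bearer {token}"}, verify=verify
                ).status_code
            except requests.RequestException:
                status = -1  # route not reachable yet; keep polling
            if status != 200:
                time.sleep(sleep_time)
                waited += sleep_time
        return status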
@@ -121,112 +83,6 @@ def remove_pipeline_project(self, project):
             time.sleep(1)
             count += 1

-    @keyword
-    def create_pipeline(self, url_test_pipeline_run_yaml):
-        print("Creating a pipeline from data science pipelines stack")
-        test_pipeline_run_yaml, _ = self.do_get(url_test_pipeline_run_yaml)
-        filename = "test_pipeline_run_yaml.yaml"
-        with open(filename, "w", encoding="utf-8") as f:
-            f.write(test_pipeline_run_yaml)
-        with open(filename, "rb") as f:
-            response, _ = self.do_upload(
-                f"https://{self.route}/apis/v1beta1/pipelines/upload",
-                files={"uploadfile": f},
-                headers={"Authorization": f"Bearer {self.sa_token}"},
-            )
-        os.remove(filename)
-        pipeline_json = json.loads(response)
-        pipeline_id = pipeline_json["id"]
-        response, status = self.do_get(
-            f"https://{self.route}/apis/v1beta1/pipelines/{pipeline_id}",
-            headers={"Authorization": f"Bearer {self.sa_token}"},
-        )
-        assert status == 200
-        assert json.loads(response)["name"] == filename
-        return pipeline_id
-
-    @keyword
-    def create_run(self, pipeline_id):
-        print("Creating the run from uploaded pipeline")
-        response, status = self.do_post(
-            f"https://{self.route}/apis/v1beta1/runs",
-            headers={
-                "Authorization": f"Bearer {self.sa_token}",
-                "Content-Type": "application/json",
-            },
-            json={
-                "name": "test-pipeline-run",
-                "pipeline_spec": {"pipeline_id": f"{pipeline_id}"},
-            },
-        )
-        assert status == 200
-        run_json = json.loads(response)
-        run_id = run_json["run"]["id"]
-
-        response, status = self.do_get(
-            f"https://{self.route}/apis/v1beta1/runs/{run_id}",
-            headers={"Authorization": f"Bearer {self.sa_token}"},
-        )
-        assert status == 200
-
-        return run_id
-
-    @keyword
-    def check_run_status(self, run_id, timeout=160):
-        run_status = None
-        count = 0
-        run_finished_ok = False
-        while not run_finished_ok and count < timeout:
-            response, status = self.do_get(
-                f"https://{self.route}/apis/v1beta1/runs/{run_id}",
-                headers={"Authorization": f"Bearer {self.sa_token}"},
-            )
-            try:
-                run_json = json.loads(response)
-                if "run" in run_json and "status" in run_json["run"]:
-                    run_status = run_json["run"]["status"]
-            except JSONDecodeError:
-                print(response, status)
-            print(f"Checking run status: {run_status}")
-            if run_status == "Failed":
-                break
-            # https://github.com/tektoncd/pipeline/blob/main/docs/pipelineruns.md#monitoring-execution-status
-            if run_status in ("Completed", "Succeeded"):
-                run_finished_ok = True
-                break
-            time.sleep(1)
-            count += 1
-        return run_finished_ok
-
-    @keyword
-    def delete_runs(self, run_id):
-        print("Deleting the runs")
-
-        response, status = self.do_delete(
-            f"https://{self.route}/apis/v1beta1/runs/{run_id}",
-            headers={"Authorization": f"Bearer {self.sa_token}"},
-        )
-        assert status == 200
-        response, status = self.do_get(
-            f"https://{self.route}/apis/v1beta1/runs/{run_id}",
-            headers={"Authorization": f"Bearer {self.sa_token}"},
-        )
-        assert status == 404
-
-    @keyword
-    def delete_pipeline(self, pipeline_id):
-        print("Deleting the pipeline")
-        response, status = self.do_delete(
-            f"https://{self.route}/apis/v1beta1/pipelines/{pipeline_id}",
-            headers={"Authorization": f"Bearer {self.sa_token}"},
-        )
-        assert status == 200
-        response, status = self.do_get(
-            f"https://{self.route}/apis/v1beta1/pipelines/{pipeline_id}",
-            headers={"Authorization": f"Bearer {self.sa_token}"},
-        )
-        assert status == 404
-
     @keyword
     def add_role_to_user(self, name, user, project):
         output, error = self.run_oc(f"oc policy add-role-to-user {name} {user} -n {project} --role-namespace={project}")
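The deleted keywords above wrapped the Kubeflow Pipelines v1beta1 REST API: upload a pipeline, create a run, poll its status, then delete the run and the pipeline. For reference, a hedged sketch of the same upload-and-run calls issued with requests directly; the endpoints and payload shapes are copied from the removed code, while the route, token and YAML file below are placeholders:

    import requests

    route = "ds-pipeline-dspa-myproject.apps.example.com"  # placeholder route host
    headers = {"Authorization": "Bearer <sa_token>"}        # placeholder token

    # Upload a pipeline definition (endpoint and field name from the removed create_pipeline).
    with open("test_pipeline_run_yaml.yaml", "rb") as f:
        upload = requests.post(
            f"https://{route}/apis/v1beta1/pipelines/upload",
            files={"uploadfile": f},
            headers=headers,
            verify=False,  # test-only shortcut; see the SSL note further down
        )
    pipeline_id = upload.json()["id"]

    # Start a run from the uploaded pipeline (payload from the removed create_run).
    run = requests.post(
        f"https://{route}/apis/v1beta1/runs",
        headers={**headers, "Content-Type": "application/json"},
        json={"name": "test-pipeline-run", "pipeline_spec": {"pipeline_id": pipeline_id}},
        verify=False,
    )
    run_id = run.json()["run"]["id"]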
@@ -309,8 +165,11 @@ def run_oc(self, command):
         output, error = process.communicate()
         return self.byte_to_str(output), error

-    def do_get(self, url, headers=None):
-        response = requests.get(url, headers=headers, verify=self.get_cert())
+    def do_get(self, url, headers=None, skip_ssl=False):
+        if skip_ssl:
+            response = requests.get(url, headers=headers, verify=False)

Check failure (Code scanning / SonarCloud, High): Server certificates should be verified during SSL/TLS connections. Enable server certificate validation on this SSL/TLS connection. See more on SonarCloud.

+        else:
+            response = requests.get(url, headers=headers, verify=self.get_cert())
         return self.byte_to_str(response.content), response.status_code

     def do_post(self, url, headers, json):
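The skip_ssl branch is exactly what the SonarCloud check flags: verify=False disables certificate validation entirely. If that is a deliberate shortcut for throw-away test clusters, a common companion (not part of this commit) is to silence urllib3's InsecureRequestWarning so test logs stay readable; a sketch, with a placeholder route:

    import urllib3
    import requests

    # Deliberately unverified request against a placeholder test route;
    # suppress the warning that requests/urllib3 emits for verify=False.
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    response = requests.get(
        "https://ds-pipeline-dspa-myproject.apps.example.com/apis/v1beta1/healthz",
        verify=False,
    )
    print(response.status_code)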
@@ -330,14 +189,15 @@ def byte_to_str(self, content):

     def get_secret(self, project, name):
         secret_json, _ = self.run_oc(f"oc get secret -n {project} {name} -o json")
+        assert len(secret_json) > 0
         return json.loads(secret_json)

     def get_cert(self):
         cert_json = self.get_secret("openshift-ingress-operator", "router-ca")
         cert = cert_json["data"]["tls.crt"]
         decoded_cert = base64.b64decode(cert).decode("utf-8")

-        file_name = "/tmp/kft-cert"
+        file_name = "/tmp/kfp-cert"
         cert_file = open(file_name, "w")
         cert_file.write(decoded_cert)
         cert_file.close()
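get_cert above decodes the ingress router CA from the router-ca secret into a fixed /tmp/kfp-cert file, whose path do_get then passes to requests through verify=. As a design note, a unique temporary file would avoid collisions if several suites ran on the same host; a sketch of that variation (an assumption, not what this commit does):

    import base64
    import tempfile

    def write_ca_bundle(tls_crt_b64):
        # Decode a base64-encoded tls.crt value and write it to a unique temp file;
        # the returned path can be handed to requests via verify=.
        decoded = base64.b64decode(tls_crt_b64).decode("utf-8")
        with tempfile.NamedTemporaryFile(mode="w", suffix=".crt", delete=False) as cert_file:
            cert_file.write(decoded)
            return cert_file.name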