-
Notifications
You must be signed in to change notification settings - Fork 5
/
Copy pathupdate_workflow.py
116 lines (85 loc) · 3.5 KB
/
update_workflow.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
#!/usr/bin/env python
"""
This script fetches the latest workflows from the central repository 'release-scripts'
and updates the local dummy workflows. Before running the script, install the required
packages using the following command:
conda install requests
This script assumes the package repository has the same parent directory as 'release-scripts'.
You can change this by modifying the 'LOCAL_WORKFLOW_DIR' variable.
Timeout errors can occasionally occur while fetching the workflows from the central repository.
In such cases, try running the script again.
"""
import os
import re
from pathlib import Path
import requests
# Ask once for the project name; the answer seeds the substitution cache so
# the {{ PROJECT / ... }} placeholder is never prompted for again.
# (The original used an f-string that interpolated a string *literal*,
# which is equivalent to this plain string.)
proj = (
    input("Enter value for 'PROJECT' (default: PROJECT_NAME): ").strip()
    or "PROJECT_NAME"
)

# Directory containing this script; the target project repo is assumed to be
# a sibling of this repository (see module docstring).
pwd = os.path.dirname(__file__)

# Central repository hosting the canonical workflow templates.
CENTRAL_REPO_ORG = "Billingegroup"
CENTRAL_REPO_NAME = "release-scripts"
CENTRAL_WORKFLOW_DIR = ".github/workflows/templates"

# Destination: <parent-of-this-repo>/<project>/.github/workflows
# Built with pathlib components instead of string concatenation.
LOCAL_WORKFLOW_DIR = Path(pwd, "..", proj, ".github", "workflows")

# Cache of placeholder values already supplied by the user, keyed by
# placeholder name; pre-seeded so PROJECT is not asked for twice.
user_input_cache = {"PROJECT": proj}
def get_central_workflows():
    """Fetch all ``*.yml`` workflow templates from the central repository.

    Returns
    -------
    dict
        Mapping of workflow file name to its raw text content.

    Raises
    ------
    RuntimeError
        If the GitHub contents-API listing request does not return 200.
    """
    base_url = (
        f"https://api.github.com/repos/{CENTRAL_REPO_ORG}/"
        f"{CENTRAL_REPO_NAME}/contents/{CENTRAL_WORKFLOW_DIR}"
    )
    response = requests.get(base_url, timeout=5)
    if response.status_code != 200:
        # Specific exception type instead of bare Exception; main() catches
        # Exception, so callers are unaffected.
        raise RuntimeError(f"Failed to fetch central workflows: {response.status_code}")
    workflows = {}
    for file in response.json():
        # Only regular files with a .yml extension are workflow templates.
        if file["type"] == "file" and file["name"].endswith(".yml"):
            content_response = requests.get(file["download_url"], timeout=5)
            # A failed individual download is skipped silently; rerunning the
            # script (as the module docstring suggests) will pick it up.
            if content_response.status_code == 200:
                workflows[file["name"]] = content_response.text
    return workflows
def get_user_input(prompt, default, param_name):
    """Prompt for a template parameter, caching the answer by *param_name*.

    A previously cached value is returned without prompting. Answers of
    "true"/"false" (any case) are normalized to lowercase; an empty answer
    falls back to *default*.
    """
    if param_name in user_input_cache:
        return user_input_cache[param_name]
    answer = input(f"{prompt} (default: {default}): ").strip()
    lowered = answer.lower()
    if lowered in ("true", "false"):
        # Normalize boolean-ish answers so the YAML gets lowercase literals.
        result = lowered
    else:
        result = answer if answer else default
    user_input_cache[param_name] = result
    return result
def update_workflow_params(content):
    """Substitute every ``{{ KEY / default }}`` placeholder in *content*.

    Each distinct KEY is resolved at most once via ``get_user_input``,
    which records the answer in ``user_input_cache``; later occurrences
    reuse the cached value.
    """
    placeholder = re.compile(r"\{\{\s*(\w+)\s*/\s*([^\s\}]+)\s*\}\}")

    def resolve(match):
        key, default = match.group(1), match.group(2)
        if key not in user_input_cache:
            # get_user_input stores the answer in user_input_cache itself.
            get_user_input(f"Enter value for '{key}'", default, key)
        return str(user_input_cache[key])

    return placeholder.sub(resolve, content)
def update_local_workflows(central_workflows):
    """Sync the local workflow directory with *central_workflows*.

    Every central template is rendered (placeholders substituted) and
    written out, overwriting any local copy; local ``*.yml`` files with no
    central counterpart are deleted.
    """
    existing = {path.name for path in LOCAL_WORKFLOW_DIR.glob("*.yml")}
    for name, raw in central_workflows.items():
        rendered = update_workflow_params(raw)
        (LOCAL_WORKFLOW_DIR / name).write_text(rendered, encoding="utf-8")
    # Anything left over locally has no central counterpart: remove it.
    for name in existing - set(central_workflows):
        (LOCAL_WORKFLOW_DIR / name).unlink()
        print(f"Removed workflow {name}")
def main():
    """Entry point: mirror the central workflow templates into the project."""
    try:
        # Ensure the destination directory exists before writing into it.
        LOCAL_WORKFLOW_DIR.mkdir(parents=True, exist_ok=True)
        update_local_workflows(get_central_workflows())
        print("Workflow synchronization completed successfully")
    except Exception as e:
        # Top-level boundary: report and return instead of crashing —
        # timeouts are expected occasionally (see module docstring) and a
        # rerun usually succeeds.
        print(f"Error: {str(e)}")


if __name__ == "__main__":
    main()