-
Notifications
You must be signed in to change notification settings - Fork 0
175 lines (146 loc) · 5.53 KB
/
run_cirun_graviton.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
# Run ASV (airspeed velocity) benchmarks on Cirun-provisioned AWS runners
# and publish the results (see the combine-and-publish job below).
name: ASV Benchmarks

on:
  push:
    branches:
      - main

# Only benchmark the newest push per ref; cancel superseded in-flight runs.
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read  # to fetch code (actions/checkout)

env:
  # GITHUB_TOKEN: ${{ secrets.OB_BENCH_TOKEN }}
  # BENCHMARKS_REPO: ev-br/ob-bench-asv
  ASV_CONFIG: asv.conf.json
  # Prefix shared by the matrix machine names / artifact names below.
  NAME_PREFIX: gha
jobs:
  # Build and run the ASV benchmark suite on each machine flavour in the
  # matrix, then upload the raw results as per-machine artifacts.
  bench:
    strategy:
      fail-fast: false
      matrix:
        include:
          # define matrix.name to identify github actions machine as hostname changes everytime
          - image: "cirun-aws-runner-graviton--${{ github.run_id }}"
            name: "gha-aws-graviton"
          - image: "cirun-aws-runner-cascade-lake--${{ github.run_id }}"
            name: "gha-aws-skylake"
    runs-on: ${{ matrix.image }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0  # To fetch all commits to be able to generate benchmarks html
      - name: Print system information
        run: |
          if [ "$RUNNER_OS" == "Linux" ]; then
            cat /proc/cpuinfo
          else
            echo "::error::$RUNNER_OS not supported"
            exit 1
          fi
      - name: Install system dependencies
        run: |
          if [ "$RUNNER_OS" == "Linux" ]; then
            # use apt-get (not apt): apt's CLI is not stable for scripting,
            # and the install line below already uses apt-get
            sudo apt-get update
            sudo apt-get install -y gfortran cmake ccache python3-pip pkg-config
          else
            echo "::error::$RUNNER_OS not supported"
            exit 1
          fi
      - name: Install python dependencies
        run: |
          # --break-system-packages is required on ubuntu noble
          pip3 install "numpy<2" meson meson-python ninja build asv virtualenv --break-system-packages
          # install the nightly OpenBLAS wheel
          pip3 install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple scipy-openblas32 --break-system-packages
          # dump the pkg-config for the ASV build to pick up (cf $PKG_CONFIG_PATH usage under `Run benchmarks`)
          python3 -c'import scipy_openblas32 as so; print(so.get_pkg_config())' > scipy_openblas.pc
      - name: Print OpenBLAS information
        run: |
          echo "scipy_openblas.pc contents: "
          cat scipy_openblas.pc
          # store the OpenBLAS wheel info to add to the gh-pages commit message
          echo ${{ matrix.name }}":" > wheel_info_${{ matrix.name }}
          python3 -c'import scipy_openblas32 as sc; print(f"version={sc.__version__} - {sc.get_openblas_config()}")' >> wheel_info_${{ matrix.name }}
      - name: Set and log asv machine configuration
        run: |
          python3 -m asv machine --yes --config asv.conf.json
          echo "Machine Configuration:"
          cat ~/.asv-machine.json
          rm ~/.asv-machine.json
          # set the machine name depending on the OS/arch image
          echo "Setting machine name to ${{ matrix.name }}"
          python3 -m asv machine --machine ${{ matrix.name }} --yes --config $ASV_CONFIG -v
          cat ~/.asv-machine.json
      - name: Run benchmarks
        run: |
          python3 -m asv run --config $ASV_CONFIG -v
          ls -l .asv/results
          echo ">>> results/machine"
          ls -l .asv/results/${{ matrix.name }}
        env:
          # points the build at the scipy_openblas.pc dumped above
          PKG_CONFIG_PATH: ${{ github.workspace }}
      - name: Store/Upload benchmark results
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.name }}
          path: |
            .asv/results/*
            wheel_info_${{ matrix.name }}
          if-no-files-found: error
combine-and-publish:
runs-on: ubuntu-latest
needs: bench
steps:
- name: Check out the repository
uses: actions/checkout@v4
with:
fetch-depth: 0 # To fetch all commits to be able to generate benchmarks html
token: ${{ secrets.AWS_BENCHMARKS }}
- name: Download all artifacts from benchmarking runs
uses: actions/download-artifact@v4
- name: Collect past runs
run: |
git checkout gh-pages
echo "@ collect: "
ls -la
mkdir -p .asv/resuls
cp -r results .asv/results
ls -la .asv
ls -la .asv/results
- name: Combine the runs
run: |
# NB artifact names start with gha-
mv gha-aws-skylake/* .asv/results
mv gha-aws-graviton/* .asv/results
# for dir in `find . -name "$NAME_PREFIX-*"`; do cp -r $dir/* .asv/results; done
echo ">>>> on gh-pages"
ls -la
ls -la .asv/results
ls -la .asv/results/gha-aws-graviton
# return to main to be able to generate the new html report
git checkout main
echo ">>> on main"
ls -la
ls -la .asv/results
ls -la .asv/results/gha-aws-graviton
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: Set up ASV and generate the html
run: |
pip install asv
asv machine --yes --config $ASV_CONFIG
asv publish --config $ASV_CONFIG -v
- name: Upload the results
run: |
git config --global user.email "project@openblas"
git config --global user.name "OB benchmark bot"
asv gh-pages
# persist the results/ directory with historic results
git checkout gh-pages
cp -r .asv/results results
git add results/
#git commit -am"add results for `git rev-parse origin/gh-pages`\n\n`cat wheel_info*`"
#git push origin HEAD
ls -l # wheel_info_*