# build_wheels.yml
name: Build PyPop
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true
on:
workflow_dispatch:
pull_request:
paths-ignore:
- '**.md'
- '**.rst'
- 'CITATION.cff'
- '.zenodo.extras.json'
- 'website/**'
- 'data/**'
- 'src/obsolete/**'
- '.github/dependabot.yml'
- '.github/release-drafter.yml'
- '.github/ISSUE_TEMPLATE/**'
- '.github/workflows/documentation.yaml'
- '.github/workflows/buildjet_arm64.yml'
- '.github/workflows/release-drafter.yml'
- '.github/workflows/codeql.yml'
- '.github/workflows/nuget_gsl_arm64_package.yml'
- '.gitattributes'
- '.pre-commit-config.yaml'
push:
paths-ignore:
- '**.md'
- '**.rst'
- 'CITATION.cff'
- '.zenodo.extras.json'
- 'website/**'
- 'data/**'
- 'src/obsolete/**'
- '.github/dependabot.yml'
- '.github/release-drafter.yml'
- '.github/ISSUE_TEMPLATE/**'
- '.github/workflows/documentation.yaml'
- '.github/workflows/buildjet_arm64.yml'
- '.github/workflows/release-drafter.yml'
- '.github/workflows/codeql.yml'
- '.github/workflows/nuget_gsl_arm64_package.yml'
- '.gitattributes'
- '.pre-commit-config.yaml'
release:
types:
- published
env:
# set MSF version and cache directory name globally
MSF_VERSION: 3.59.0-alpha
MSF_CACHE_DIR: msf-files-3.59.0-alpha
jobs:
skip_publish_zenodo:
name: Not a publication release, skip Zenodo
runs-on: ubuntu-latest
# FIXME: this is the default job: its condition is the logical inverse of
# the "if" in publish_zenodo below; a bit hacky, but it works
if: (github.event_name == 'release' && github.event.action == 'published' && github.event.release.prerelease == false) == false
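# i.e. this job runs on every trigger except a published, non-pre-release GitHub release;
# those are handled by publish_zenodo below (when targeting main)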
steps:
- name: Skip Zenodo step unless publishing release
run: |
echo "Not a publication release, skipping Zenodo"
echo "github.ref: ${{ toJson(github.ref) }}"
echo "github.event_name: ${{ toJson(github.event_name) }}"
echo "github.event.action: ${{ toJson(github.event.action) }}"
publish_zenodo:
name: Production release, Zenodo publication
runs-on: ubuntu-latest
# "else" we *are* publishing something, only works for non-pre-releases on the main branch
if: github.event.release.target_commitish == 'main' && github.event_name == 'release' && github.event.action == 'published' && github.event.release.prerelease == false
steps:
- name: Checkout the contents of the repository
uses: actions/checkout@v4
- name: Upsert the version and related ids in the metadata in CITATION.cff
run: |
echo "GITHUB_REF_NAME = $GITHUB_REF_NAME | GITHUB_REF = $GITHUB_REF | GITHUB_EVENT_PATH = $GITHUB_EVENT_PATH"
echo "github.event.release.target_commitish = ${{ github.event.release.target_commitish }}"
# convert semver to Python-style version
VERSION_PYTHON_STYLE=$(GITHUB_REF_NAME=$GITHUB_REF_NAME python -c 'import os; from packaging import version; print(str(version.Version(os.getenv("GITHUB_REF_NAME"))))')
echo "VERSION_PYTHON_STYLE = $VERSION_PYTHON_STYLE"
# insert version
yq -i ".version = \"$GITHUB_REF_NAME\"" CITATION.cff
# update the 'related_identifiers' to use exact tree and pypi package
yq -i "(.identifiers[] | select(.value == \"https://github.com/alexlancaster/pypop/tree*\") | .value) |= \"https://github.com/alexlancaster/pypop/tree/$GITHUB_REF_NAME\"" CITATION.cff
yq -i "(.identifiers[] | select(.value == \"https://pypi.org/project/pypop-genomics/*\") | .value) |= \"https://pypi.org/project/pypop-genomics/$VERSION_PYTHON_STYLE\"" CITATION.cff
# temporarily remove DOI
yq -i eval "del(.doi)" CITATION.cff
# use pre-release of cffconvert 3.0.0
pip install -U pip
pip install 'git+https://github.com/alexlancaster/cffconvert.git@combine_features#egg=cffconvert'
# convert to json, but remove creators
cffconvert --format zenodo --infile CITATION.cff | jq 'del(.creators)' > .zenodo.citation.json
# save creators separately; if they are included in the merge they get re-sorted
cffconvert --format zenodo --infile CITATION.cff | jq '{creators}' > .zenodo.creators.json
# need to do a "deep merge" to make sure 'related_identifiers' from .zenodo.extras.json get merged properly
# including updating the version number, followed by merging back with creators
cat .zenodo.citation.json .zenodo.extras.json |jq -s 'def deepmerge(a;b):
reduce b[] as $item (a;
reduce ($item | keys_unsorted[]) as $key (.;
$item[$key] as $val | ($val | type) as $type | .[$key] = if ($type == "object") then
deepmerge({}; [if .[$key] == null then {} else .[$key] end, $val])
elif ($type == "array") then
(.[$key] + $val | unique)
else
$val
end)
);
deepmerge({}; .)' -S --indent 4 | cat .zenodo.creators.json - | jq -s add --indent 4 > .zenodo.json
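# sketch of the merge semantics above (illustrative input, not the real metadata):
# deepmerge({}; [{"a":{"x":1}},{"a":{"y":2}}]) yields {"a":{"x":1,"y":2}};
# object keys are merged recursively, arrays are concatenated and de-duplicated via 'unique',
# and scalar values take the last one seen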
# debugging only
cat .zenodo.citation.json
cat .zenodo.creators.json
cat .zenodo.json
rm .zenodo.citation.json .zenodo.creators.json
- name: Push changes back to repo
run: |
git fetch origin
git config --global user.name "ci-pypop"
git config --global user.email "[email protected]"
git checkout -b $GITHUB_REF_NAME-with-upserting-changes-pre-upload
git add CITATION.cff
git commit -m "Upserted version into CITATION.cff"
git checkout ${{ github.event.release.target_commitish }}
git merge $GITHUB_REF_NAME-with-upserting-changes-pre-upload
git push origin ${{ github.event.release.target_commitish }}
- name: Clean-up and remove files not for export
run: |
# FIXME: remove files that have the 'export-ignore' attribute set in .gitattributes
# before zenodraft creates the zipball using https://stackoverflow.com/a/30573319
# to ensure that we handle filenames with potential ':' or newlines
git ls-files -z|git check-attr --stdin -z export-ignore |sed -zne 'x;n;n;s/^set$//;t print;b;:print;x;p'|xargs --null rm
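# e.g. a (hypothetical) .gitattributes line such as 'website/** export-ignore' would cause
# the pipeline above to delete those files from the checkout before the zipball is created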
# FIXME: need to also remove orphaned empty directories resulting from above
find . -empty -type d -delete
- name: Create a draft snapshot of the repository contents as a new version, using metadata from the generated .zenodo.json
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
ZENODO_ACCESS_TOKEN: ${{ secrets.ZENODO_ACCESS_TOKEN }}
uses: zenodraft/[email protected]
with:
concept: 10080667
metadata: .zenodo.json
publish: false
sandbox: false
compression: zip
upsert-doi: true
upsert-location: doi
verbose: true
generate-wheels-matrix:
# Create a matrix of all architectures & versions to build.
# This enables the next step to run cibuildwheel in parallel.
# From https://iscinumpy.dev/post/cibuildwheel-2-10-0/#only-210
name: Generate wheels matrix
runs-on: ubuntu-latest
outputs:
include: ${{ steps.set-matrix.outputs.include }}
steps:
- uses: actions/checkout@v4
- name: Install cibuildwheel
# NB: keep the cibuildwheel version pin consistent with the pypa/cibuildwheel action used in build_wheels below
run: pipx install cibuildwheel==2.22.0
- id: set-matrix
run: |
echo "CI_ONLY" $CI_ONLY
if [ "$CI_ONLY" == "true" ]; then
MATRIX="[{'only':'cp313-manylinux_x86_64','os':'ubuntu-20.04'},{'only':'cp313-manylinux_aarch64','os':'ubuntu-22.04-arm'},{'only':'cp313-win_amd64','os':'windows-2019'},{'only':'cp313-win_arm64','os':'windows-2019'},{'only':'cp313-macosx_x86_64','os':'macos-13'}, {'only':'cp313-macosx_arm64','os':'macos-14'}]"
else
MATRIX=$(
{
cibuildwheel --print-build-identifiers --platform linux --archs x86_64 \
| jq -nRc '{"only": inputs, "os": "ubuntu-20.04"}' \
&& cibuildwheel --print-build-identifiers --platform linux --archs aarch64 \
| jq -nRc '{"only": inputs, "os": "ubuntu-22.04-arm"}' \
&& cibuildwheel --print-build-identifiers --platform macos --archs x86_64 \
| jq -nRc '{"only": inputs, "os": "macos-13"}' \
&& cibuildwheel --print-build-identifiers --platform macos --archs arm64 \
| jq -nRc '{"only": inputs, "os": "macos-14"}' \
&& cibuildwheel --print-build-identifiers --platform windows \
| jq -nRc '{"only": inputs, "os": "windows-2019"}'
} | jq -sc
)
fi
echo "include=$MATRIX" >> $GITHUB_OUTPUT
echo "platform matrix:" $MATRIX
env:
# only generate all wheels on releases, pull requests, manual dispatch, or commits to the main branch
# otherwise just generate a single Python 3.13 test wheel per platform/architecture
CI_ONLY: ${{ github.event_name != 'release' && github.event_name != 'pull_request' && github.ref_name != 'main' && github.event_name != 'workflow_dispatch' }}
# set up a shared cache for downloading data files (e.g. MSF files)
# before matrix jobs
setup-cache:
name: Setup cache for remote files
runs-on: ubuntu-latest
steps:
- name: Set up cache for MSF files
id: msf_cache
uses: actions/cache@v4
with:
path: ${{ env.MSF_CACHE_DIR }}
key: pooch-cache-${{ env.MSF_CACHE_DIR }}
restore-keys: |
pooch-cache-${{ env.MSF_CACHE_DIR }}
enableCrossOsArchive: true
fail-on-cache-miss: false
- name: Preload MSF files into Cache
if: ${{ steps.msf_cache.outputs.cache-hit != 'true' }}
run: |
echo "Cache not found or is outdated. Fetching MSF files..."
pip install pooch requests
python -c "
import os
import pooch
import requests
# Define the base URL and path for the MSF files
base_url = 'https://raw.githubusercontent.com/ANHIG/IMGTHLA/v${MSF_VERSION}/msf/'
# create cache directory
cache_dir = '${MSF_CACHE_DIR}'
# fetch the directory listing of the msf/ directory via the GitHub contents API
response = requests.get('https://api.github.com/repos/ANHIG/IMGTHLA/contents/msf?ref=v${MSF_VERSION}')
files = response.json()
# extract only prot.msf and nuc.msf files from the response
msf_files = [file['name'] for file in files if file['name'].endswith('_prot.msf') or file['name'].endswith('_nuc.msf')]
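# e.g. msf_files might look like ['A_prot.msf', 'A_nuc.msf', 'B_prot.msf', ...] (illustrative locus names)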
# fetch each MSF file using pooch.retrieve
for file in msf_files:
print(f'Fetching {file}...')
local_file = pooch.retrieve(
url=f'{base_url}{file}', # full URL to the file
known_hash=None, # no known hash, so pooch skips hash verification for the download
path=cache_dir # cache location
)
print(f'Saved {local_file}...')
print('All MSF files fetched and cached successfully.')
"
- name: Save the cache for MSF files
if: ${{ steps.msf_cache.outputs.cache-hit != 'true' }}
uses: actions/cache/save@v4
with:
path: ${{ env.MSF_CACHE_DIR }}
key: pooch-cache-${{ env.MSF_CACHE_DIR }}
enableCrossOsArchive: true
build_wheels:
name: Build wheels on ${{ matrix.only }}
# depend on zenodo, to make sure updated CITATION.cff gets into builds
# this only results in changes upon production releases
needs: [publish_zenodo, skip_publish_zenodo, generate-wheels-matrix, setup-cache]
# trying solution here: https://github.com/actions/runner/issues/491#issuecomment-1507495166
# so that this job runs even though only one of the two mutually-exclusive zenodo jobs in "needs" ever completes
if: always() && !cancelled() && !failure()
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
include: ${{ fromJson(needs.generate-wheels-matrix.outputs.include) }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
# FIXME: if on a release, use the tag name, not original SHA because it might be changed (somewhat hacky)
ref: ${{ (github.event_name == 'release' && github.event.action == 'published') && github.ref_name || '' }}
- name: Restore cache for MSF files
uses: actions/cache/restore@v4
with:
# restore to the workspace, not home directory
path: ${{ env.MSF_CACHE_DIR }}
key: pooch-cache-${{ env.MSF_CACHE_DIR }}
restore-keys: |
pooch-cache-${{ env.MSF_CACHE_DIR }}
enableCrossOsArchive: true
- name: Do cache setup based on OS
shell: bash
run: |
echo "Setting POOCH_CACHE based on runner OS"
if [[ "$RUNNER_OS" == "Linux" ]]; then
# adding /project to path so it works inside cibuildwheel containers
POOCH_CACHE="/project/${MSF_CACHE_DIR}"
else
POOCH_CACHE="${GITHUB_WORKSPACE}/${MSF_CACHE_DIR}"
fi
echo "POOCH_CACHE=${POOCH_CACHE}"
echo "POOCH_CACHE=${POOCH_CACHE}" >> $GITHUB_ENV
- name: Install a recent stable Python to handle Python deps
uses: actions/setup-python@v5
with:
python-version: 3.12
- name: Query version with setuptools_scm
id: version
shell: bash
run: |
python -m pip install setuptools_scm
python -m pip install toml
VERSION_SCHEME=$(python -c "import tomllib; print(tomllib.load(open('pyproject.toml','rb'))['tool']['setuptools_scm']['version_scheme'])")
VERSION=$(python -c "import setuptools_scm; print(setuptools_scm.get_version(version_scheme=\"${VERSION_SCHEME}\"))")
echo "VERSION=${VERSION}"
echo "VERSION=${VERSION}" >> $GITHUB_ENV
- name: Install toml and remove cffconvert from pyproject.toml
run: |
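# presumably dropped here because the forked cffconvert used for citation generation is
# installed explicitly in the next step, rather than via the isolated build environment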
python -c "
import toml
with open('pyproject.toml', 'r') as f:
config = toml.load(f)
if 'build-system' in config and 'requires' in config['build-system']:
config['build-system']['requires'] = [
dep for dep in config['build-system']['requires'] if 'cffconvert' not in dep.lower()
]
with open('pyproject.toml', 'w') as f:
toml.dump(config, f)
"
- name: Generate citation formats
run: |
python --version
python -m pip install git+https://github.com/alexlancaster/cffconvert.git@combine_features#egg=cffconvert
python src/PyPop/citation.py
- name: Build and test wheels
uses: pypa/[email protected]
env:
# FIXME: only run the slow tests for regular pushes or manual dispatch - not for PRs
#CIBW_TEST_COMMAND: "pytest -v {package}/tests ${{ (github.event_name == 'push' || github.event_name == 'workflow_dispatch') && '--runslow' || '' }}"
PYTEST_OPTIONS: "${{ (github.event_name == 'push' || github.event_name == 'workflow_dispatch') && '--runslow' || '' }}"
SETUPTOOLS_SCM_PRETEND_VERSION: ${{ env.VERSION }}
POOCH_CACHE: ${{ env.POOCH_CACHE }}
CIBW_ENVIRONMENT_PASS_LINUX: SETUPTOOLS_SCM_PRETEND_VERSION POOCH_CACHE PYTEST_OPTIONS
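# PYTEST_OPTIONS is presumably expanded by the CIBW_TEST_COMMAND defined in pyproject.toml
# (compare the commented-out CIBW_TEST_COMMAND variant above)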
with:
only: ${{ matrix.only }}
package-dir: .
output-dir: wheelhouse
config-file: "{package}/pyproject.toml"
- uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.only }}
path: ./wheelhouse/*.whl
# use path: ./wheelhouse/* if artifacts other than wheels are wanted
build_sdist:
name: Build source distribution
# depend on zenodo, to make sure updated CITATION.cff gets into builds
needs: [publish_zenodo, skip_publish_zenodo]
if: always() && !cancelled() && !failure()
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with: # FIXME: if on a release, use the tag name, not original SHA because it might be changed (somewhat hacky)
ref: ${{ (github.event_name == 'release' && github.event.action == 'published') && github.ref_name || '' }}
fetch-depth: 0
- name: Build sdist
run: pipx run build --sdist
- uses: actions/upload-artifact@v4
with:
name: sdist
path: dist/*.tar.gz
upload_gh_release:
name: Upload binary wheels and sdist to GH release page
needs: [build_wheels, build_sdist]
runs-on: ubuntu-latest
# only publish when a GitHub Release is published, using the following rule:
if: github.event_name == 'release' && github.event.action == 'published' && always() && !cancelled() && !failure()
steps:
- uses: actions/download-artifact@v4
with:
# unpacks default artifact into dist/
path: dist
merge-multiple: true
- uses: softprops/action-gh-release@v2
name: Uploading binaries to release page
with:
files: dist/*
upload_test_pypi:
name: Upload to Test_PyPI
needs: [build_wheels, build_sdist]
runs-on: ubuntu-latest
environment: test_pypi
permissions:
id-token: write
# no 'main' branch requirement here: all published releases go to Test PyPI so they can be tested
if: github.event_name == 'release' && github.event.action == 'published' && always() && !cancelled() && !failure()
steps:
- uses: actions/download-artifact@v4
with:
# unpacks default artifact into dist/
path: dist
merge-multiple: true
- uses: pypa/gh-action-pypi-publish@release/v1
with:
repository_url: https://test.pypi.org/legacy/
upload_pypi:
name: Upload to PyPI
needs: [build_wheels, build_sdist]
runs-on: ubuntu-latest
environment: pypi
permissions:
id-token: write
if: github.event.release.target_commitish == 'main' && github.event_name == 'release' && github.event.action == 'published' && always() && !cancelled() && !failure()
steps:
- uses: actions/download-artifact@v4
with:
# unpacks default artifact into dist/
path: dist
merge-multiple: true
- name: Exclude win_arm64 wheels
run: |
find dist -name '*win_arm64*.whl' -exec rm {} +
- name: Publish package distributions to PyPI
uses: pypa/gh-action-pypi-publish@release/v1