# build.yml — GitHub Actions CI workflow for discrete-optimization
# (scrape residue from the GitHub web UI removed)
name: Build discrete-optimization

on:
  push:
    branches:
      - "**"
    tags:
      # release tags, e.g. v1.2.3
      - 'v[0-9]+.[0-9]+.[0-9]+'
  pull_request:
  workflow_dispatch:

concurrency:
  # one run at a time per workflow+ref: a new push cancels the in-flight run
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  linters:
    # static checks via pre-commit hooks
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: create requirements.txt so that pip cache with setup-python works
        run: echo "pre-commit" > requirements_precommit.txt
      - uses: actions/setup-python@v5
        with:
          python-version: "3.8"
          cache: "pip"
          cache-dependency-path: requirements_precommit.txt
      - name: install pre-commit
        run: python -m pip install pre-commit
      - name: get cached pre-commit hooks
        uses: actions/cache@v4
        with:
          path: ~/.cache/pre-commit
          # cache key invalidated when python install or hook config changes
          key: pre-commit|${{ env.pythonLocation }}|${{ hashFiles('.pre-commit-config.yaml') }}
      - name: pre-commit checks
        run: pre-commit run --show-diff-on-failure --color=always --all-files
  trigger:
    # store trigger reason
    # exposes two booleans (as strings 'true'/'false') reused by downstream jobs
    runs-on: ubuntu-latest
    outputs:
      is_release: ${{ steps.reason.outputs.is_release }}
      is_push_on_default_branch: ${{ steps.reason.outputs.is_push_on_default_branch }}
    steps:
      - id: reason
        run: |
          echo "is_release=${{ startsWith(github.ref, 'refs/tags/v') }}" >> $GITHUB_OUTPUT
          echo "is_push_on_default_branch=${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }}" >> $GITHUB_OUTPUT
  build:
    # build the wheel once, shared by all downstream jobs via artifacts
    runs-on: ubuntu-latest
    needs: trigger
    outputs:
      # wheel version, parsed from the built wheel filename
      do_version: ${{ steps.get_wheel_version.outputs.version }}
    steps:
      - name: Checkout source code
        uses: actions/checkout@v4
      - name: create requirements.txt so that pip cache with setup-python works
        run:
          echo "build" > requirements_build.txt
      - uses: actions/setup-python@v5
        with:
          python-version: "3.8"
          cache: "pip"
          cache-dependency-path: requirements_build.txt
      - name: Install build dependencies
        run: pip install -U build
      - name: Update version number according to pushed tag
        if: needs.trigger.outputs.is_release == 'true'
        run: |
          VERSION=${GITHUB_REF/refs\/tags\/v/} # stripping "refs/tags/v"
          echo $VERSION
          # Replace in-place version number in package __init__.py, also used by pyproject.toml
          sed -i -e "s/^__version__\s*=.*$/__version__ = \"${VERSION}\"/g" discrete_optimization/__init__.py
          cat discrete_optimization/__init__.py
      - name: Build discrete-optimization wheel
        run: python -m build --wheel
      - name: Upload as build artifacts
        uses: actions/upload-artifact@v4
        with:
          name: wheels
          path: dist/*.whl
      - name: get wheel version and save it
        id: get_wheel_version
        run: |
          # wheel filename convention: name-version-...whl -> field 1 is the version
          wheelfile=$(ls ./dist/discrete_optimization*.whl)
          version=$(python -c "print('$wheelfile'.split('-')[1])")
          echo "version=$version"
          echo "version=$version" >> $GITHUB_OUTPUT
  test:
    # install the built wheel (not the source tree) and run the test suite
    # across OSes / python versions / with-without gurobi
    needs: build
    strategy:
      fail-fast: false
      matrix:
        os: ["ubuntu-latest", "macos-latest", "windows-latest"]
        python-version: ["3.8", "3.11"]
        wo_gurobi: ["", "without gurobi"]
        include:
          # per-OS MiniZinc download / install / PATH-configuration commands
          - os: "ubuntu-latest"
            minizinc_config_cmdline: export PATH=$PATH:$(pwd)/bin/squashfs-root/usr/bin; export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pwd)/bin/squashfs-root/usr/lib
            minizinc_cache_path: $(pwd)/bin/squashfs-root
            minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.6.3/MiniZincIDE-2.6.3-x86_64.AppImage
            minizinc_downloaded_filepath: bin/minizinc.AppImage
            minizinc_install_cmdline: cd bin; sudo chmod +x minizinc.AppImage; sudo ./minizinc.AppImage --appimage-extract; cd ..
          - os: "macos-latest"
            minizinc_config_cmdline: export PATH=$PATH:$(pwd)/bin/MiniZincIDE.app/Contents/Resources
            minizinc_cache_path: $(pwd)/bin/MiniZincIDE.app
            minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.6.3/MiniZincIDE-2.6.3-bundled.dmg
            minizinc_downloaded_filepath: bin/minizinc.dmg
            minizinc_install_cmdline: sudo hdiutil attach bin/minizinc.dmg; sudo cp -R /Volumes/MiniZinc*/MiniZincIDE.app bin/.
          - os: "windows-latest"
            minizinc_config_cmdline: export PATH=$PATH:~/AppData/Local/Programs/MiniZinc
            minizinc_cache_path: ~/AppData/Local/Programs/MiniZinc
            minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.6.3/MiniZincIDE-2.6.3-bundled-setup-win64.exe
            minizinc_downloaded_filepath: minizinc_setup.exe
            minizinc_install_cmdline: cmd //c "minizinc_setup.exe /verysilent /currentuser /norestart /suppressmsgboxes /sp"
          - coverage: false # generally no coverage to avoid multiple reports
          - coverage: true # coverage only for one entry of the matrix
            os: "ubuntu-latest"
            python-version: "3.11"
            wo_gurobi: ""
        exclude:
          # gurobi-less runs only exercised on ubuntu + python 3.11
          - os: "windows-latest"
            wo_gurobi: "without gurobi"
          - os: "macos-latest"
            wo_gurobi: "without gurobi"
          - os: "ubuntu-latest"
            wo_gurobi: "without gurobi"
            python-version: "3.8"
    runs-on: ${{ matrix.os }}
    defaults:
      run:
        # bash everywhere (incl. windows) so the matrix cmdlines work unchanged
        shell: bash
    steps:
      - name: Checkout discrete-optimization source code
        uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
          cache-dependency-path: pyproject.toml
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          name: wheels
          path: wheels
      - name: Install only discrete-optimization
        run: |
          python -m pip install -U pip
          wheelfile=$(ls ./wheels/discrete_optimization*.whl)
          pip install ${wheelfile}
      - name: Check import work without minizinc if DO_SKIP_MZN_CHECK set
        run: |
          export DO_SKIP_MZN_CHECK=1
          python -c "import discrete_optimization"
      - name: Check import fails without minizinc if DO_SKIP_MZN_CHECK unset
        run: |
          python -c "
          try:
              import discrete_optimization
          except RuntimeError:
              pass
          else:
              raise AssertionError('We should not be able to import discrete_optimization without minizinc being installed.')
          "
      - name: Create bin/
        run: mkdir -p bin
      - name: get MininZinc path to cache
        id: get-mzn-cache-path
        run: |
          echo "path=${{ matrix.minizinc_cache_path }}" >> $GITHUB_OUTPUT # expands variables
      - name: Restore MiniZinc cache
        id: cache-minizinc
        uses: actions/cache@v4
        with:
          path: ${{ steps.get-mzn-cache-path.outputs.path }}
          # keyed on the download URL: new MiniZinc version -> new cache entry
          key: ${{ matrix.minizinc_url }}
      - name: Download MiniZinc
        if: steps.cache-minizinc.outputs.cache-hit != 'true'
        run: |
          curl -o "${{ matrix.minizinc_downloaded_filepath }}" -L ${{ matrix.minizinc_url }}
      - name: Install MiniZinc
        if: steps.cache-minizinc.outputs.cache-hit != 'true'
        run: |
          ${{ matrix.minizinc_install_cmdline }}
      - name: Test minizinc install
        run: |
          ${{ matrix.minizinc_config_cmdline }}
          minizinc --version
      - name: Check imports are working
        run: |
          # configure minizinc
          ${{ matrix.minizinc_config_cmdline }}
          # check imports
          python tests/test_import_all_submodules.py
      - name: Install test dependencies
        run: |
          wheelfile=$(ls ./wheels/discrete_optimization*.whl)
          pip install ${wheelfile}[test]
          if [ "${{ matrix.wo_gurobi }}" != "without gurobi" ]; then
            echo "install gurobi"
            pip install gurobipy
            python -c "import gurobipy"
          fi
      - name: Restore tests data cache
        id: cache-data
        uses: actions/cache@v4
        with:
          path: ~/discrete_optimization_data
          key: data-${{ hashFiles('discrete_optimization/datasets.py') }}
      - name: Fetch data for tests
        if: steps.cache-data.outputs.cache-hit != 'true'
        run: |
          ${{ matrix.minizinc_config_cmdline }}
          python -m discrete_optimization.datasets
      - name: Test with pytest (no coverage)
        if: ${{ !matrix.coverage }}
        run: |
          # configure minizinc
          ${{ matrix.minizinc_config_cmdline }}
          # show library path used
          pytest -s tests/show_do_path.py
          # run test suite
          MPLBACKEND="agg" NUMBA_BOUNDSCHECK=1 pytest \
            -v --durations=0 --durations-min=10 \
            tests
      - name: Test with pytest (with coverage)
        if: ${{ matrix.coverage }}
        run: |
          # configure minizinc
          ${{ matrix.minizinc_config_cmdline }}
          # create a tmp directory from which running the tests
          # so that "--cov discrete_optimization" look for package installed via the wheel
          # instead of the source directory in the repository (which would lead to a coverage of 0%)
          mkdir -p tmp && cd tmp
          # show library path used
          pytest -s ../tests/show_do_path.py
          # run test suite
          MPLBACKEND="agg" NUMBA_BOUNDSCHECK=1 pytest \
            --cov discrete_optimization  \
            --cov-report xml:coverage.xml \
            --cov-report html:coverage_html \
            --cov-report term \
            -v --durations=0 --durations-min=10 \
            ../tests
          cd ..
      - name: Upload coverage report as artifact
        if: ${{ matrix.coverage }}
        uses: actions/upload-artifact@v4
        with:
          name: coverage
          path: |
            tmp/coverage.xml
            tmp/coverage_html
create-binder-env:
runs-on: ubuntu-latest
needs: [trigger, build]
env:
BINDER_ENV_DEFAULT_REF: binder # default reference: binder branch
outputs:
binder_env_ref: ${{ steps.get_binder_env_ref.outputs.binder_env_ref }}
steps:
- name: Initialize binder env reference
run: echo "BINDER_ENV_REF=$BINDER_ENV_DEFAULT_REF" >> $GITHUB_ENV
- name: Checkout binder branch
if: |
(needs.trigger.outputs.is_push_on_default_branch == 'true')
|| (needs.trigger.outputs.is_release == 'true')
uses: actions/checkout@v4
with:
ref: ${{ env.BINDER_ENV_DEFAULT_REF }}
- name: Update environment.yml and binder env reference
if: |
(needs.trigger.outputs.is_push_on_default_branch == 'true')
|| (needs.trigger.outputs.is_release == 'true')
run: |
if ${{ needs.trigger.outputs.is_release == 'true' }}; then
# RELEASE MODE
# Specify the proper discrete-optimization version
sed_replacement_pattern="\1- discrete-optimization==${{ needs.build.outputs.do_version }}"
if ${{ github.repository != 'airbus/discrete-optimization' && secrets.TEST_PYPI_API_TOKEN != '' }} == 'true'; then
# release to be deployed on TestPyPI
sed_replacement_pattern="${sed_replacement_pattern}\n\1- --extra-index-url https://test.pypi.org/simple/"
fi
sed_command="s|\(\s*\)-.*egg=discrete-optimization$|${sed_replacement_pattern}|"
echo $sed_command
sed -i -e "$sed_command" environment.yml
cat environment.yml
# Commit new environment
git config user.name "Actions"
git config user.email "[email protected]"
git commit environment.yml -m "Specify binder environment for release ${{ needs.build.outputs.do_version }}"
# Get sha1 to be used by binder for the environment, update the binder env reference
echo "BINDER_ENV_REF=$(git rev-parse --verify HEAD)" >> $GITHUB_ENV
# Revert for the default branch binder env
git revert HEAD -n
git commit -m "Specify binder environment for default branch"
else
# DEFAULT BRANCH MODE
# Specify the proper discrete-optimization version (current master sha1)
pip_spec="git+${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}@${GITHUB_SHA}#egg=discrete-optimization"
echo $pip_spec
sed -i -e "s|\(\s*\)-.*egg=discrete-optimization$|\1- ${pip_spec}|" environment.yml
cat environment.yml
# Commit new environment
git config user.name "Actions"
git config user.email "[email protected]"
git add environment.yml
if ! git diff-index --quiet HEAD -- environment.yml; then
# commit only if a difference (else would raise an error)
# no diff could happen if relaunching the whole workflow while this action was successful
git commit -m "Specify binder environment for default branch"
fi
fi
# Push binder branch with the updated environment
git push origin $BINDER_ENV_DEFAULT_REF
- name: Store binder env reference
id: get_binder_env_ref
run: |
echo "binder_env_ref=$BINDER_ENV_REF"
echo "binder_env_ref=$BINDER_ENV_REF" >> $GITHUB_OUTPUT
- uses: actions/checkout@v4 # checkout triggering branch to get scripts/trigger_binder.sh
- name: Trigger a build for default binder env ref on each BinderHub deployments in the mybinder.org federation
continue-on-error: true
if: |
(needs.trigger.outputs.is_push_on_default_branch == 'true')
|| (needs.trigger.outputs.is_release == 'true')
run: |
binder_env_full_ref=${GITHUB_REPOSITORY}/${BINDER_ENV_DEFAULT_REF}
echo Triggering binder environment build for ${binder_env_full_ref}
binderhubs_url="ovh.mybinder.org ovh2.mybinder.org notebooks.gesis.org/binder"
return_code=0
for binderhub in $binderhubs; do
echo "** on ${binderhub}"
# go on even though the script crashes
bash scripts/trigger_binder.sh https://${binderhub_url}/build/gh/${binder_env_full_ref} || ret_code=$?
# remember the potential crash
if [ $ret_code != 0 ]; then return_code=$ret_code; fi
done
# exit with last non-zero return code if any
exit $return_code
  build-doc:
    # build the sphinx documentation against the built wheel and the binder env
    runs-on: ubuntu-latest
    needs: create-binder-env
    env:
      python-version: "3.9"
      # same MiniZinc setup as the ubuntu entry of the test matrix
      minizinc_config_cmdline: export PATH=$PATH:$(pwd)/bin/squashfs-root/usr/bin; export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pwd)/bin/squashfs-root/usr/lib
      minizinc_cache_path: $(pwd)/bin/squashfs-root
      minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.6.3/MiniZincIDE-2.6.3-x86_64.AppImage
      minizinc_downloaded_filepath: bin/minizinc.AppImage
      minizinc_install_cmdline: cd bin; sudo chmod +x minizinc.AppImage; sudo ./minizinc.AppImage --appimage-extract; cd ..
    steps:
      - name: Set env variables for github+binder links in doc
        run: |
          # binder environment repo and branch
          AUTODOC_BINDER_ENV_GH_REPO_NAME="${GITHUB_REPOSITORY}"
          AUTODOC_BINDER_ENV_GH_BRANCH="${{ needs.create-binder-env.outputs.binder_env_ref }}"
          # notebooks source repo and branch depending if it is a commit push or a PR
          if [[ $GITHUB_REF == refs/pull* ]];
          then
              AUTODOC_NOTEBOOKS_REPO_URL="${GITHUB_SERVER_URL}/${{ github.event.pull_request.head.repo.full_name }}"
              AUTODOC_NOTEBOOKS_BRANCH=${GITHUB_HEAD_REF}
          elif [[ $GITHUB_REF == refs/heads* ]];
          then
              AUTODOC_NOTEBOOKS_REPO_URL=${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}
              AUTODOC_NOTEBOOKS_BRANCH=${GITHUB_REF/refs\/heads\//}
          elif [[ $GITHUB_REF == refs/tags* ]];
          then
              AUTODOC_NOTEBOOKS_REPO_URL=${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}
              AUTODOC_NOTEBOOKS_BRANCH=${GITHUB_REF/refs\/tags\//}
          fi
          # export in GITHUB_ENV for next steps
          echo "AUTODOC_BINDER_ENV_GH_REPO_NAME=${AUTODOC_BINDER_ENV_GH_REPO_NAME}" >> $GITHUB_ENV
          echo "AUTODOC_BINDER_ENV_GH_BRANCH=${AUTODOC_BINDER_ENV_GH_BRANCH}" >> $GITHUB_ENV
          echo "AUTODOC_NOTEBOOKS_REPO_URL=${AUTODOC_NOTEBOOKS_REPO_URL}" >> $GITHUB_ENV
          echo "AUTODOC_NOTEBOOKS_BRANCH=${AUTODOC_NOTEBOOKS_BRANCH}" >> $GITHUB_ENV
          # check computed variables
          echo "Binder env: ${AUTODOC_BINDER_ENV_GH_REPO_NAME}/${AUTODOC_BINDER_ENV_GH_BRANCH}"
          echo "Notebooks source: ${AUTODOC_NOTEBOOKS_REPO_URL}/tree/${AUTODOC_NOTEBOOKS_BRANCH}"
      - uses: actions/checkout@v4
        with:
          submodules: true
      - name: Setup python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.python-version }}
          cache: "pip"
          cache-dependency-path: |
            pyproject.toml
            docs/requirements.txt
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          name: wheels
          path: wheels
      - name: Create bin/
        run: mkdir -p bin
      - name: get MininZinc path to cache
        id: get-mzn-cache-path
        run: |
          echo "path=${{ env.minizinc_cache_path }}" >> $GITHUB_OUTPUT # expands variables
      - name: Restore MiniZinc cache
        id: cache-minizinc
        uses: actions/cache@v4
        with:
          path: ${{ steps.get-mzn-cache-path.outputs.path }}
          key: ${{ env.minizinc_url }}
      - name: Download MiniZinc
        if: steps.cache-minizinc.outputs.cache-hit != 'true'
        run: |
          curl -o "${{ env.minizinc_downloaded_filepath }}" -L ${{ env.minizinc_url }}
      - name: Install MiniZinc
        if: steps.cache-minizinc.outputs.cache-hit != 'true'
        run: |
          ${{ env.minizinc_install_cmdline }}
      - name: Install doc dependencies
        run: |
          python -m pip install -U pip
          wheelfile=$(ls ./wheels/discrete_optimization*.whl)
          pip install ${wheelfile}
          pip install -r docs/requirements.txt
      - name: generate documentation
        run: |
          # configure minzinc (beware: paths are relative to default working directory)
          ${{ env.minizinc_config_cmdline }}
          # move to documentation directory
          cd docs
          # generate api doc source files
          sphinx-apidoc -o source/api -f -T ../discrete_optimization
          # generate available notebooks list
          python generate_nb_index.py
          # build doc html pages
          sphinx-build -M html source build
          # specify it is a nojekyll site
          touch build/html/.nojekyll
      - name: upload as artifact
        uses: actions/upload-artifact@v4
        with:
          name: doc
          path: docs/build/html
  deploy-master-doc:
    # for default branch
    # publish the doc artifact under gh-pages/<default-branch>/
    needs: [trigger, create-binder-env, build-doc, test]
    if: needs.trigger.outputs.is_push_on_default_branch == 'true'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          name: doc
          path: docs/build/html
      - name: Deploy documentation in a version subfolder on GH pages
        uses: JamesIves/github-pages-deploy-action@v4
        with:
          branch: gh-pages # The branch the action should deploy to.
          folder: docs/build/html # The folder the action should deploy.
          target-folder: /${{ github.event.repository.default_branch }} # The folder the action should deploy to.
          commit-message: publish documentation
          single-commit: true
deploy:
# for release tags
runs-on: ubuntu-latest
needs: [trigger, test]
if: needs.trigger.outputs.is_release == 'true'
steps:
- name: Download wheels artifact
uses: actions/download-artifact@v4
with:
name: wheels
path: wheels
- name: Create the github release
uses: ncipollo/release-action@v1
with:
artifacts: wheels/*.whl
generateReleaseNotes: true
- name: Publish package to TestPyPI (only for forks)
env:
TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}
if: github.repository != 'airbus/discrete-optimization' && env.TEST_PYPI_API_TOKEN != ''
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.TEST_PYPI_API_TOKEN }}
repository_url: https://test.pypi.org/legacy/
packages_dir: wheels/
- name: Publish package to PyPI (main repo)
env:
PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
if: github.repository == 'airbus/discrete-optimization' && env.PYPI_API_TOKEN != ''
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.PYPI_API_TOKEN }}
packages_dir: wheels/
  deploy-release-doc:
    # for release tags
    # publish the doc artifact under gh-pages/<release-version>/
    needs: [trigger, build, create-binder-env, build-doc, deploy]
    if: needs.trigger.outputs.is_release == 'true'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          name: doc
          path: docs/build/html
      - name: Deploy documentation in a version subfolder on GH pages
        uses: JamesIves/github-pages-deploy-action@v4
        with:
          branch: gh-pages # The branch the action should deploy to.
          folder: docs/build/html # The folder the action should deploy.
          target-folder: /${{ needs.build.outputs.do_version }} # The folder the action should deploy to.
          commit-message: publish documentation
          single-commit: true
  update-doc-versions:
    # regenerate versions.json on gh-pages listing all published doc versions
    # triggers even if only one needed job succeeded (typically at least one will be skipped)
    needs: [deploy-release-doc, deploy-master-doc]
    if: |
      always()
      && (
        (needs.deploy-master-doc.result == 'success')
        || (needs.deploy-release-doc.result == 'success')
      )
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          ref: gh-pages
      - uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - name: Generate versions.json
        # run the step body directly with python instead of bash
        shell: python3 {0}
        run: |
          import json
          from pathlib import Path
          cwd = Path.cwd()
          # every non-hidden top-level directory on gh-pages is a doc version
          versions = sorted((item.name for item in cwd.iterdir()
                             if item.is_dir() and not item.name.startswith('.')),
                            reverse=True)
          target_file = Path('versions.json')
          with target_file.open('w') as f:
              json.dump(versions, f)
      - name: Commit versions.json
        shell: bash
        run: |
          # Commit versions.json and squash it with previous commit
          git config user.name "Actions"
          git config user.email "[email protected]"
          git add versions.json
          git commit --amend --no-edit
          git push -f origin gh-pages