Remove stale trackers directory #395

Workflow file for this run

# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
name: Lint and Test TurnkeyML
on:
  push:
    branches: ["main", "canary", "refresh"]
  pull_request:
    branches: ["main", "canary", "refresh"]
permissions:
  contents: read
jobs:
  build-turnkey:
    env:
      TURNKEY_TRACEBACK: True
    strategy:
      matrix:
        python-version: ["3.8", "3.10"]
        os: [ubuntu-latest, windows-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v3
      - name: Set up Miniconda with 64-bit Python
        uses: conda-incubator/setup-miniconda@v3
        with:
          miniconda-version: "latest"
          activate-environment: tkml
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        shell: bash -el {0}
        run: |
          python -m pip install --upgrade pip
          conda install pylint
          pip install pytest
          pip install -e .
          pip install transformers timm
          python -m pip check
      - name: Lint with PyLint
        shell: bash -el {0}
        run: |
          pylint src/turnkeyml --rcfile .pylintrc
          pylint examples --rcfile .pylintrc --ignore-paths examples/build_api --disable E0401,E0611
      - name: Test with unittest
        shell: bash -el {0}
        run: |
          # Unit tests
          python test/unit.py
          # turnkey examples
          # Note: we clear the default cache location prior to each example run
          rm -rf ~/.cache/turnkey
          python examples/files_api/onnx_opset.py --onnx-opset 15
          rm -rf ~/.cache/turnkey
          turnkey -i examples/cli/scripts/hello_world.py discover export-pytorch benchmark
          rm -rf ~/.cache/turnkey
          turnkey -i examples/cli/scripts/multiple_invocations.py discover export-pytorch benchmark
          rm -rf ~/.cache/turnkey
          turnkey -i examples/cli/scripts/max_depth.py discover --max-depth 1 export-pytorch benchmark
          rm -rf ~/.cache/turnkey
          turnkey -i examples/cli/scripts/two_models.py discover export-pytorch benchmark
          rm -rf ~/.cache/turnkey
          turnkey -i examples/cli/onnx/hello_world.onnx load-onnx benchmark
          # E2E tests
          cd test/
          python cli.py
          python analysis.py
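      # Install the example plugin packages and confirm the turnkey CLI
      # discovers and runs their custom runtimes and tools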
      - name: Test example plugins
        shell: bash -el {0}
        run: |
          rm -rf ~/.cache/turnkey
          pip install -e examples/cli/plugins/example_rt
          turnkey -i examples/cli/scripts/hello_world.py discover export-pytorch benchmark --runtime example-rt
          rm -rf ~/.cache/turnkey
          pip install -e examples/cli/plugins/example_tool
          turnkey -i examples/cli/scripts/hello_world.py discover export-pytorch example-plugin-tool benchmark
          rm -rf ~/.cache/turnkey
          pip install -e examples/cli/plugins/example_combined
          turnkey -i examples/cli/scripts/hello_world.py discover export-pytorch combined-example-tool benchmark --runtime example-combined-rt --rt-args delay_before_benchmarking::5
          turnkey -i examples/cli/scripts/hello_world.py discover export-pytorch combined-example-tool benchmark --device example_family::part1::config2
          turnkey -i examples/cli/scripts/hello_world.py discover export-pytorch combined-example-tool benchmark --device example_family::part1::config1
          turnkey -i examples/cli/scripts/hello_world.py discover export-pytorch combined-example-tool benchmark --device example_family::part1
          turnkey -i examples/cli/scripts/hello_world.py discover export-pytorch combined-example-tool benchmark --device example_family
          # E2E tests
          cd test
          python plugins.py
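      # Slurm is not available on the Windows runners, so the Slurm steps only run on Linux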
      - name: Install and Start Slurm
        if: runner.os != 'Windows'
        shell: bash -el {0}
        run: |
          sudo apt update -y
          sudo apt install slurm-wlm -y
          cp test/helpers/slurm.conf test/helpers/slurm_modified.conf
          sed -i "s/YOUR_HOSTNAME_HERE/$HOSTNAME/" test/helpers/slurm_modified.conf
          sudo mv test/helpers/slurm_modified.conf /etc/slurm/slurm.conf
          sudo service slurmd start
          sudo service slurmctld start
          sudo service munge start
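      # Submit a turnkey build through Slurm and check the resulting job output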
      - name: Test turnkey on Slurm
        if: runner.os != 'Windows'
        shell: bash -el {0}
        run: |
          # Create conda environment for Slurm using srun (sbatch + wait)
          export SKIP_REQUIREMENTS_INSTALL="True"
          export TORCH_CPU="True"
          srun src/turnkeyml/cli/setup_venv.sh
          # Run tests on Slurm
          export TURNKEY_SLURM_USE_DEFAULT_MEMORY="True"
          turnkey -i models/selftest/linear.py --use-slurm --cache-dir local_cache discover export-pytorch
          bash test/helpers/check_slurm_output.sh slurm-2.out
      # Below tests are commented out as the GitHub runner runs out of space installing the requirements
      # - name: Check installation of requirements.txt and their compatibility with turnkey
      #   shell: bash -el {0}
      #   run: |
      #     conda create --name test-requirements python=3.8
      #     conda activate test-requirements
      #     pip install -r models/requirements.txt
      #     python -m pip check
      #     python -c "import torch_geometric"
      #     conda deactivate