Add support for using a Llama.cpp binary and model from TurnkeyML #544
Workflow file for this run
# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python

name: Lint and Test TurnkeyML

on:
  push:
    branches: ["main", "canary", "refresh"]
  pull_request:
    branches: ["main", "canary", "refresh"]

permissions:
  contents: read

jobs:
  build-turnkey:
    env:
      TURNKEY_TRACEBACK: True
    strategy:
      matrix:
        python-version: ["3.8", "3.11"]
        os: [ubuntu-latest, windows-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v3
      - name: Set up Miniconda with 64-bit Python
        uses: conda-incubator/setup-miniconda@v3
        with:
          miniconda-version: "latest"
          activate-environment: tkml
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
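        # Note on the shell setting: -e aborts the step on the first failing command, and
        # -l runs bash as a login shell so the conda environment activated by setup-miniconda
        # is available in each run step.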
        shell: bash -el {0}
        run: |
          python -m pip install --upgrade pip
          conda install pylint=3.2.7
          pip install pytest
          pip install -e plugins/devices
          pip install transformers timm
          pip install -e . # Required to test current tkml package instead of pypi version
          python -m pip check
      - name: Lint with PyLint
        shell: bash -el {0}
        run: |
          pylint src/turnkeyml --rcfile .pylintrc --ignore-paths src/turnkeyml/llm --jobs=1
          pylint examples --rcfile .pylintrc --disable E0401,E0611 --jobs=1
      - name: Test with unittest
        shell: bash -el {0}
        run: |
          # Unit tests
          python test/unit.py

          # turnkey examples
          # Note: we clear the default cache location prior to each block of example runs
          rm -rf ~/.cache/turnkey
          python examples/api/onnx_opset.py --onnx-opset 15
          python examples/api/loading_a_build.py

          rm -rf ~/.cache/turnkey
          turnkey -i examples/cli/scripts/hello_world.py discover export-pytorch benchmark

          rm -rf ~/.cache/turnkey
          turnkey -i examples/cli/scripts/multiple_invocations.py discover export-pytorch benchmark

          rm -rf ~/.cache/turnkey
          turnkey -i examples/cli/scripts/max_depth.py discover --max-depth 1 export-pytorch benchmark

          rm -rf ~/.cache/turnkey
          turnkey -i examples/cli/scripts/two_models.py discover export-pytorch benchmark

          rm -rf ~/.cache/turnkey
          turnkey -i examples/cli/onnx/hello_world.onnx load-onnx benchmark

          # E2E tests
          cd test/
          python cli.py
          python analysis.py
      - name: Test example plugins
        shell: bash -el {0}
        run: |
          rm -rf ~/.cache/turnkey
          pip install -e examples/cli/plugins/example_tool
          turnkey -i examples/cli/scripts/hello_world.py discover export-pytorch example-plugin-tool benchmark
      # - name: Install and Start Slurm
      #   if: runner.os != 'Windows'
      #   shell: bash -el {0}
      #   run: |
      #     sudo apt update -y
      #     sudo apt install slurm-wlm -y
      #     cp test/helpers/slurm.conf test/helpers/slurm_modified.conf
      #     sed -i "s/YOUR_HOSTNAME_HERE/$HOSTNAME/" test/helpers/slurm_modified.conf
      #     sudo mv test/helpers/slurm_modified.conf /etc/slurm/slurm.conf
      #     sudo service slurmd start
      #     sudo service slurmctld start
      #     sudo service munge start
      # - name: Test turnkey on Slurm
      #   if: runner.os != 'Windows'
      #   shell: bash -el {0}
      #   run: |
      #     # Create conda environment for Slurm using srun (sbatch + wait)
      #     export SKIP_REQUIREMENTS_INSTALL="True"
      #     export TORCH_CPU="True"
      #     srun src/turnkeyml/cli/setup_venv.sh
      #     # Run tests on Slurm
      #     export TURNKEY_SLURM_USE_DEFAULT_MEMORY="True"
      #     turnkey -i models/selftest/linear.py --use-slurm --cache-dir local_cache discover export-pytorch
      #     bash test/helpers/check_slurm_output.sh slurm-2.out

      # Below tests are commented out as the GitHub runner runs out of space installing the requirements
      # - name: Check installation of requirements.txt and their compatibility with turnkey
      #   shell: bash -el {0}
      #   run: |
      #     conda create --name test-requirements python=3.8
      #     conda activate test-requirements
      #     pip install -r models/requirements.txt
      #     python -m pip check
      #     python -c "import torch_geometric"
      #     conda deactivate
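
For local debugging, the lint and unit-test steps above can be reproduced outside of CI. The commands below are a minimal sketch lifted from the workflow, assuming conda is installed, the repository root is the working directory, and an environment named tkml (mirroring the workflow); the Python version is one of the two tested by the matrix:

  conda create -n tkml python=3.8 -y
  conda activate tkml
  python -m pip install --upgrade pip
  conda install -y pylint=3.2.7
  pip install pytest
  pip install -e plugins/devices
  pip install transformers timm
  pip install -e .   # test the local tkml package rather than the PyPI release
  pylint src/turnkeyml --rcfile .pylintrc --ignore-paths src/turnkeyml/llm --jobs=1
  python test/unit.py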