Run python3 dev/update_mlflow_versions.py post-release... (#13772) #133

Workflow file for this run

name: Recipe tests

on:
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  push:
    branches:
      - master
      - branch-[0-9]+.[0-9]+

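# Limit the default GITHUB_TOKEN to read-only repository access.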
permissions:
  contents: read

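# Cancel any in-progress run for the same workflow, event, and ref when a new
# one starts, so superseded pushes do not tie up runners.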
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }}
  cancel-in-progress: true

# Use `bash --noprofile --norc -exo pipefail` by default for all `run` steps in this workflow:
# https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#defaultsrun
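# (--noprofile/--norc skip shell startup files; -e exits on the first failing
# command, -x echoes each command, and -o pipefail makes a pipeline fail if any
# stage in it fails)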
defaults:
  run:
    shell: bash --noprofile --norc -exo pipefail {0}

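# Environment variables shared by every job in this workflow.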
env:
  MLFLOW_HOME: /home/runner/work/mlflow/mlflow
  # Note miniconda is pre-installed in the virtual environments for GitHub Actions:
  # https://github.com/actions/virtual-environments/blob/main/images/linux/scripts/installers/miniconda.sh
  MLFLOW_CONDA_HOME: /usr/share/miniconda
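  # Bind Spark to localhost and force Python's UTF-8 mode so test behavior does
  # not depend on the runner's network or locale configuration.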
  SPARK_LOCAL_IP: localhost
  PYTHONUTF8: "1"

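# Both jobs skip draft pull requests; the ready_for_review trigger above re-runs
# them once a draft is marked ready.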
jobs:
  recipes:
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: ubuntu-latest
    timeout-minutes: 120
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - uses: ./.github/actions/untracked
      - uses: ./.github/actions/setup-python
      - uses: ./.github/actions/setup-pyenv
      - name: Install dependencies
        run: |
          source ./dev/install-common-deps.sh
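          # Keep numpy on 1.x (the workflow does not say why; presumably a
          # compatibility guard for test dependencies that lack numpy 2 support).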
          pip install 'numpy<2.0'
      - name: Run tests
        run: |
          pytest tests/recipes

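  # The same suite on Windows, split into two groups that run in parallel; the
  # --splits/--group flags used below come from the pytest-split plugin.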
  recipes-windows:
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: windows-latest
    strategy:
      fail-fast: false
      matrix:
        group: [1, 2]
        include:
          - splits: 2
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - uses: ./.github/actions/untracked
      - uses: ./.github/actions/setup-python
      - uses: ./.github/actions/setup-pyenv
      - name: Install python dependencies
        run: |
          pip install -r requirements/test-requirements.txt
          pip install --no-dependencies tests/resources/mlflow-test-plugin
          pip install .
          # TODO: Importing `datasets` in a pandas UDF (created by mlflow.pyfunc.spark_udf)
          # crashes the Python worker. To avoid this, uninstall `datasets`. This is a
          # temporary workaround.
          pip uninstall -y datasets
          pip install 'numpy<2.0'
      - name: Download Hadoop winutils for Spark
        run: |
          git clone https://github.com/cdarlint/winutils /tmp/winutils
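      # Spark on Windows needs Hadoop's winutils.exe; HADOOP_HOME is pointed at
      # the cloned binaries in the test step below.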
      - name: Run tests
        env:
          # The default pooling implementation 'QueuePool' can lead to errors on Windows
          # when multiple threads access the same SQLite database file simultaneously.
          MLFLOW_SQLALCHEMYSTORE_POOLCLASS: "NullPool"
        run: |
          # Set Hadoop environment variables required for testing Spark integrations on Windows
          export HADOOP_HOME=/tmp/winutils/hadoop-3.2.2
          export PATH=$PATH:$HADOOP_HOME/bin
          export MLFLOW_HOME=$(pwd)
          pytest --splits ${{ matrix.splits }} --group ${{ matrix.group }} tests/recipes