Added jupyter_ai, v1 tests
Piyush Jain authored and claytonparnell committed Oct 20, 2023
1 parent 5f05cd9 commit 5922998
Showing 19 changed files with 279 additions and 2 deletions.
13 changes: 13 additions & 0 deletions test/test_artifacts/v1/autogluon.test.Dockerfile
@@ -0,0 +1,13 @@
ARG COSMOS_IMAGE
FROM $COSMOS_IMAGE as base

ARG MAMBA_DOCKERFILE_ACTIVATE=1

RUN micromamba install --freeze-installed -y conda-forge::pytest conda-forge::jupyter

RUN git clone --recursive https://github.com/autogluon/autogluon.git

WORKDIR "autogluon"
COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_autogluon_tests.sh .
RUN chmod +x run_autogluon_tests.sh
CMD ["./run_autogluon_tests.sh"]
16 changes: 16 additions & 0 deletions test/test_artifacts/v1/boto3.test.Dockerfile
@@ -0,0 +1,16 @@
ARG COSMOS_IMAGE
FROM $COSMOS_IMAGE

ARG MAMBA_DOCKERFILE_ACTIVATE=1
RUN sudo apt-get update && sudo apt-get install -y git && \
git clone --recursive https://github.com/boto/boto3.git && \
:

# For running the boto3 tests, we need pytest
RUN micromamba install -y --freeze-installed conda-forge::pytest


WORKDIR "boto3"
COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_boto3_tests.sh .
RUN chmod +x run_boto3_tests.sh
CMD ["./run_boto3_tests.sh"]
6 changes: 6 additions & 0 deletions test/test_artifacts/v1/jupyter-ai.test.Dockerfile
@@ -0,0 +1,6 @@
ARG COSMOS_IMAGE
FROM $COSMOS_IMAGE

ARG MAMBA_DOCKERFILE_ACTIVATE=1

CMD ["python", "-c", "import jupyter_ai"]
25 changes: 25 additions & 0 deletions test/test_artifacts/v1/keras.test.Dockerfile
@@ -0,0 +1,25 @@
ARG COSMOS_IMAGE
FROM $COSMOS_IMAGE

ARG MAMBA_DOCKERFILE_ACTIVATE=1

RUN sudo apt-get update && sudo apt-get install -y git graphviz && \
git clone --recursive https://github.com/keras-team/keras-io.git && \
:

# Some of the keras guides require pydot and graphviz to be installed
RUN micromamba install -y --freeze-installed conda-forge::pydot nvidia::cuda-nvcc
ENV XLA_FLAGS=--xla_gpu_cuda_data_dir=/opt/conda

WORKDIR "keras-io/guides"

# Check out a specific commit known to be compatible with the runtime's current version of TensorFlow.
# keras-io made backwards-incompatible changes that broke these tests. Pin at this commit for now,
# at least until the runtime's TensorFlow dependency is upgraded to the next minor version.
RUN git checkout 861b59747b43ce326bb0a12384a07d6632249901

COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_keras_tests.sh .
RUN chmod +x run_keras_tests.sh
# Run tests in run_keras_tests.sh
CMD ["./run_keras_tests.sh"]

19 changes: 19 additions & 0 deletions test/test_artifacts/v1/matplotlib.test.Dockerfile
@@ -0,0 +1,19 @@
ARG COSMOS_IMAGE
FROM $COSMOS_IMAGE

ARG MAMBA_DOCKERFILE_ACTIVATE=1

RUN sudo apt-get update && sudo apt-get install -y git && \
git clone --recursive https://github.com/matplotlib/matplotlib.git && \
:

# TODO: Come up with a different way to test the matplotlib installation.
# Currently we run all the Python files in galleries/tutorials, but this directory structure
# might change in the future: "galleries/tutorials" didn't exist in the past, when the
# repository just had a "tutorials" folder.
WORKDIR "matplotlib/galleries/tutorials"
COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_matplotlib_tests.sh .
RUN chmod +x run_matplotlib_tests.sh
# Run tests in run_matplotlib_tests.sh
CMD ["./run_matplotlib_tests.sh"]

13 changes: 13 additions & 0 deletions test/test_artifacts/v1/numpy.test.Dockerfile
@@ -0,0 +1,13 @@
ARG COSMOS_IMAGE
FROM $COSMOS_IMAGE

ARG MAMBA_DOCKERFILE_ACTIVATE=1

# In order to test numpy, we need pytest and hypothesis to be installed.
RUN micromamba install -y --freeze-installed conda-forge::pytest conda-forge::hypothesis
# Some unit tests in numpy require gcc to be installed.
RUN sudo apt-get update && sudo apt-get install -y gcc
# Check https://numpy.org/doc/stable/reference/testing.html
# numpy.test() returns True if the tests succeed, else False.
# We flip the result so that we exit with status code 0 when all the tests succeed.
CMD ["python", "-c", "import numpy,sys; tests_succeeded = numpy.test(); sys.exit(not tests_succeeded)"]
8 changes: 8 additions & 0 deletions test/test_artifacts/v1/pandas.test.Dockerfile
@@ -0,0 +1,8 @@
ARG COSMOS_IMAGE
FROM $COSMOS_IMAGE

ARG MAMBA_DOCKERFILE_ACTIVATE=1
RUN micromamba install -y --freeze-installed -c conda-forge pytest hypothesis

COPY --chown=$MAMBA_USER:$MAMBA_USER run_pandas_tests.py .
CMD ["python", "run_pandas_tests.py"]
20 changes: 20 additions & 0 deletions test/test_artifacts/v1/pytorch.examples.Dockerfile
@@ -0,0 +1,20 @@
ARG COSMOS_IMAGE
FROM $COSMOS_IMAGE

ARG MAMBA_DOCKERFILE_ACTIVATE=1
RUN sudo apt-get update && \
sudo apt-get install -y git && \
git clone --recursive https://github.com/pytorch/examples && \
:

WORKDIR "examples"

# There is a line in run_python_examples.sh which looks like: BASE_DIR=`pwd`"/"`dirname $0`
# When we run the shell script through /usr/local/bin/_entrypoint.sh, that line above doesn't work correctly. In our
# case, we properly set `pwd` to the directory that contains all the examples, so we just modify the script to change
# the previous line to look like: BASE_DIR=`pwd`
RUN sed -i 's/^BASE_DIR=.*pwd.*dirname.*/BASE_DIR=`pwd`/' run_python_examples.sh
RUN ./run_python_examples.sh install_deps

# We skip `imagenet` because it requires a lot of resources and so isn't a good fit for us.
CMD ["./run_python_examples.sh", "dcgan,fast_neural_style,distributed,mnist,mnist_forward_forward,mnist_hogwild,mnist_rnn,regression,reinforcement_learning,siamese_network,super_resolution,time_sequence_prediction,vae,word_language_model,fx"]
20 changes: 20 additions & 0 deletions test/test_artifacts/v1/run_pandas_tests.py
@@ -0,0 +1,20 @@
import pandas, sys, os, site

# We change the working directory here because there is at least one test (`test_html_template_extends_options`) which
# expects the working directory to be 'pandas'. Ideally, we would have changed directories through a `WORKDIR` in the
# Dockerfile, but unfortunately it doesn't accept dynamic arguments.
site_packages_dir = site.getsitepackages()[0]
os.chdir(site_packages_dir)

# pandas.test() by default runs with `-m "not slow and not network and not db"`. However, we found a few tests in the
# test_network.py file that should have been marked as "network" but weren't, so we skip those here. We skip S3 specific
# tests for the same reason.
# We skip `test_plain_axes` too: the Pandas dev environment expects matplotlib to be ">=3.6.1, <3.7.0" but the runtime
# expectation is just ">=3.6.1". Our image contains v3.7.1, so it meets the latter requirement but not the former. This
# particular test, however, only works with the former requirement. (We verified that the test succeeds if we manually
# drop the version to v3.6.x.) So we skip it.
tests_succeeded = pandas.test([
'-m', '(not slow and not network and not db)',
'-k', '(not test_network and not s3 and not test_plain_axes)'])

sys.exit(not tests_succeeded)
12 changes: 12 additions & 0 deletions test/test_artifacts/v1/scipy.test.Dockerfile
@@ -0,0 +1,12 @@
ARG COSMOS_IMAGE
FROM $COSMOS_IMAGE

ARG MAMBA_DOCKERFILE_ACTIVATE=1

# In order to test scipy, we need pytest and hypothesis to be installed.
RUN micromamba install -y --freeze-installed conda-forge::pytest conda-forge::hypothesis conda-forge::scipy-tests
# Check https://github.com/numpy/numpy/blob/main/doc/TESTS.rst
# Note: the testing guidelines are the same for numpy and scipy.
# scipy.test() returns True if the tests succeed, else False.
# We flip the result so that we exit with status code 0 when all the tests succeed.
CMD ["python", "-c", "import scipy,sys; tests_succeeded = scipy.test(); sys.exit(not tests_succeeded)"]
17 changes: 17 additions & 0 deletions test/test_artifacts/v1/scripts/run_autogluon_tests.sh
@@ -0,0 +1,17 @@
#!/bin/bash

AUTOGLUON_VERSION=$(micromamba list | grep autogluon | tr -s ' ' | cut -d ' ' -f 3)
git checkout tags/v$AUTOGLUON_VERSION

# Run the autogluon quick start notebooks as an end-to-end check
jupyter nbconvert --execute --to python docs/tutorials/tabular/tabular-quick-start.ipynb
jupyter nbconvert --execute --to python docs/tutorials/timeseries/forecasting-quick-start.ipynb

# Detect a GPU and run the multimodal quick start if one is present
python -c "import torch; exit(0) if torch.cuda.is_available() else exit(1)"
ret=$?

if [ $ret -eq 0 ]
then
jupyter nbconvert --execute --to python docs/tutorials/multimodal/multimodal_prediction/multimodal-quick-start.ipynb
fi
10 changes: 10 additions & 0 deletions test/test_artifacts/v1/scripts/run_boto3_tests.sh
@@ -0,0 +1,10 @@
#!/bin/bash

# We need to check out the version of boto3 that is installed in the mamba environment.

boto3_version=$(micromamba list | grep boto3 | tr -s ' ' | cut -d ' ' -f 3)
# Check out the corresponding boto3 version
git checkout tags/$boto3_version

# Run the unit and functional tests
pytest tests/unit tests/functional || exit $?
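For reference, the same pin-to-installed-version pattern expressed in Python — a hedged sketch, not part of this commit; it assumes `micromamba list` prints name and version as the first two columns of each package row.

import subprocess

def installed_version(package: str) -> str:
    # Scan the environment listing for the package row and return its version column.
    for line in subprocess.check_output(["micromamba", "list"], text=True).splitlines():
        fields = line.split()
        if fields and fields[0] == package:
            return fields[1]
    raise LookupError(f"{package} is not installed in this environment")

# Check out the git tag matching the installed boto3 version, as the script above does.
subprocess.run(["git", "checkout", f"tags/{installed_version('boto3')}"], check=True)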
11 changes: 11 additions & 0 deletions test/test_artifacts/v1/scripts/run_keras_tests.sh
@@ -0,0 +1,11 @@
#!/bin/bash

# Ref: https://keras.io/guides/, https://github.com/keras-team/keras-io/tree/master

for file in *.py; do
if [ "$file" != "transfer_learning.py" ]; then
        # Skip transfer_learning.py because it trains for 20 epochs and takes a very long time to execute
# https://github.com/keras-team/keras-io/blob/master/guides/transfer_learning.py#L562
python "$file" || exit $?
fi
done
7 changes: 7 additions & 0 deletions test/test_artifacts/v1/scripts/run_matplotlib_tests.sh
@@ -0,0 +1,7 @@
#!/bin/bash
# Run all the tutorials
for file in *.py; do
python "$file" || exit $?
done


20 changes: 20 additions & 0 deletions test/test_artifacts/v1/scripts/run_pysdk_tests.sh
@@ -0,0 +1,20 @@
#!/bin/bash

# We need to check out the version of sagemaker-python-sdk that is installed in the mamba environment.

pysdk_version=$(micromamba list | grep sagemaker-python-sdk | tr -s ' ' | cut -d ' ' -f 3)
# Check out the corresponding sagemaker-python-sdk version
git checkout tags/v$pysdk_version

# Install test dependencies of sagemaker-python-sdk
# Using pip as some of the packages are not available on conda-forge
pip install -r requirements/extras/test_requirements.txt

# Run the unit tests, ignoring tests which require AWS Configuration
# TODO: Re-evaluate the ignored tests since we are setting the AWS_DEFAULT_REGION as part of the Dockerfile.
pytest tests/unit --ignore=tests/unit/sagemaker/feature_store/ --ignore=tests/unit/sagemaker/jumpstart/ --ignore=tests/unit/sagemaker/workflow/ \
    --ignore=tests/unit/sagemaker/async_inference --ignore=tests/unit/sagemaker/experiments --ignore=tests/unit/sagemaker/local \
    --ignore=tests/unit/sagemaker/monitor/test_data_capture_config.py --ignore=tests/unit/sagemaker/remote_function \
    --ignore=tests/unit/sagemaker/model/test_deploy.py --ignore=tests/unit/test_model_card.py --ignore=tests/unit/test_processing.py \
    --ignore=tests/unit/test_tensorboard.py \
    --deselect tests/unit/test_estimator.py::test_insert_invalid_source_code_args \
    --deselect tests/unit/sagemaker/tensorflow/test_estimator.py::test_insert_invalid_source_code_args || exit $?
13 changes: 13 additions & 0 deletions test/test_artifacts/v1/sm-python-sdk.test.Dockerfile
@@ -0,0 +1,13 @@
ARG COSMOS_IMAGE
FROM $COSMOS_IMAGE

ARG MAMBA_DOCKERFILE_ACTIVATE=1
RUN sudo apt-get update && sudo apt-get install -y git && \
git clone --recursive https://github.com/aws/sagemaker-python-sdk.git && \
:
# The SageMaker Python SDK's unit tests require AWS_DEFAULT_REGION to be set, so we use an arbitrary value of us-east-1
ENV AWS_DEFAULT_REGION=us-east-1
WORKDIR "sagemaker-python-sdk"
COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/run_pysdk_tests.sh .
RUN chmod +x run_pysdk_tests.sh
CMD ["./run_pysdk_tests.sh"]
16 changes: 16 additions & 0 deletions test/test_artifacts/v1/tensorflow.examples.Dockerfile
@@ -0,0 +1,16 @@
ARG COSMOS_IMAGE
FROM $COSMOS_IMAGE

ARG MAMBA_DOCKERFILE_ACTIVATE=1
RUN sudo apt-get update && \
sudo apt-get install -y git && \
git clone --recursive https://github.com/tensorflow/docs.git && \
:

WORKDIR "docs/site/en/guide"
COPY --chown=$MAMBA_USER:$MAMBA_USER tensorflow ./
RUN chmod +x run_tensorflow_example_notebooks.sh

RUN micromamba install -y --freeze-installed -c conda-forge papermill

CMD ["./run_tensorflow_example_notebooks.sh"]
29 changes: 29 additions & 0 deletions test/test_artifacts/v1/tensorflow/run_tensorflow_example_notebooks.sh
@@ -0,0 +1,29 @@
#!/bin/bash

# Create an empty notebook file for papermill's output
touch nb_output.ipynb

# List of all notebook files referenced in the Basics, Core, and In Depth sections of the TensorFlow docs, excluding experimental ones.
# https://www.tensorflow.org/guide
example_notebooks=('basics.ipynb'
'tensor.ipynb'
'variable.ipynb'
'autodiff.ipynb'
'intro_to_graphs.ipynb'
'intro_to_modules.ipynb'
'basic_training_loops.ipynb'
'core/quickstart_core.ipynb'
'core/logistic_regression_core.ipynb'
'core/mlp_core.ipynb'
'core/matrix_core.ipynb'
'core/optimizers_core.ipynb'
'tensor_slicing.ipynb'
'advanced_autodiff.ipynb'
'ragged_tensor.ipynb'
'sparse_tensor.ipynb'
'random_numbers.ipynb'
)

for nb in "${example_notebooks[@]}"; do
    papermill "$nb" nb_output.ipynb || exit $?
done
6 changes: 4 additions & 2 deletions test/test_dockerfile_based_harness.py
@@ -26,7 +26,8 @@
("pandas.test.Dockerfile", ['pandas']),
("sm-python-sdk.test.Dockerfile", ['sagemaker-python-sdk']),
("pytorch.examples.Dockerfile", ['pytorch']),
("tensorflow.examples.Dockerfile", ['tensorflow'])])
("tensorflow.examples.Dockerfile", ['tensorflow']),
("jupyter-ai.test.Dockerfile", ['jupyter-ai'])])
def test_dockerfiles_for_cpu(dockerfile_path: str, required_packages: List[str],
local_image_version: str, use_gpu: bool):
_validate_docker_images(dockerfile_path, required_packages, local_image_version, use_gpu, 'cpu')
@@ -43,7 +44,8 @@ def test_dockerfiles_for_cpu(dockerfile_path: str, required_packages: List[str],
("pandas.test.Dockerfile", ['pandas']),
("sm-python-sdk.test.Dockerfile", ['sagemaker-python-sdk']),
("pytorch.examples.Dockerfile", ['pytorch']),
("tensorflow.examples.Dockerfile", ['tensorflow'])])
("tensorflow.examples.Dockerfile", ['tensorflow']),
("jupyter-ai.test.Dockerfile", ['jupyter-ai'])])
def test_dockerfiles_for_gpu(dockerfile_path: str, required_packages: List[str],
local_image_version: str, use_gpu: bool):
_validate_docker_images(dockerfile_path, required_packages, local_image_version, use_gpu, 'gpu')
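The body of _validate_docker_images is not shown in this diff. As a hedged sketch of what such a harness typically does (the function name, tag scheme, and paths below are assumptions), it builds the test Dockerfile against the locally built image and runs the container, letting the image's CMD signal pass/fail through its exit status.

import subprocess

def validate_docker_image_sketch(dockerfile: str, base_image: str, processor: str) -> None:
    tag = "test-" + dockerfile.lower().replace(".", "-")
    # Build the test image, injecting the image under test via the COSMOS_IMAGE build arg.
    subprocess.run(
        ["docker", "build", "-f", f"test/test_artifacts/v1/{dockerfile}",
         "--build-arg", f"COSMOS_IMAGE={base_image}", "-t", tag,
         "test/test_artifacts/v1"],
        check=True,
    )
    run_cmd = ["docker", "run", "--rm"]
    if processor == "gpu":
        run_cmd += ["--gpus", "all"]
    # The container's CMD runs the package tests; a non-zero exit fails the check.
    subprocess.run(run_cmd + [tag], check=True)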
