-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path.travis.yml
279 lines (268 loc) · 12.9 KB
/
.travis.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
###############################################################################
language: python
###############################################################################
# Cache data which has to be downloaded on every build.
# This is just for Travis and doesn't do anything on Shippable.
cache:
  directories:
  # Cache files downloaded by pip
  - $HOME/.cache/pip
  # Cache our miniconda download.
  # Cautionary note: if a new version of Python is released and added to
  # conda and you want to test on the new version, your cached copy will be
  # out of date and you'll need to wipe the cache. Details are here
  # https://docs.travis-ci.com/user/caching/#Clearing-Caches
  # but if this sounds like too much hassle, you can just comment out this
  # line to stop caching.
  - $HOME/Downloads
  - $HOME/miniconda
###############################################################################
python:
# This is a flag for the built-in version of Python provided by the CI-server
# provider, which we don't use in favour of conda. But we use this to pick
# out which python version we install with conda, since it means the provider
# gets appropriate metadata to keep things organised.
# Quoted so YAML does not read the version as a float.
- "3.4"
###############################################################################
env:
  matrix:
  - USE_OLDEST_DEPENDENCIES="false"
  #- USE_OLDEST_DEPENDENCIES="true"
###############################################################################
# Setup the environment before installing
before_install:
# Remember the directory where our repository to test is located
- REPOPATH="$(pwd)" && pwd
# ---------------------------------------------------------------------------
# Check which versions of numpy and scipy we are using, then remove these
# lines from requirements.txt (they get installed via conda instead of pip).
- if [ -f requirements.txt ]; then
    NUMPY_REQUIREMENT="$(grep '^numpy\([!<>=~ ]\|$\)' requirements.txt)";
    echo "NumPy requirement is '$NUMPY_REQUIREMENT'";
    SCIPY_REQUIREMENT="$(grep '^scipy\([!<>=~ ]\|$\)' requirements.txt)";
    echo "SciPy requirement is '$SCIPY_REQUIREMENT'";
    sed '/^\(num\|sci\)py\([!<>=~ ]\|$\)/d' requirements.txt >
      requirements.txt.tmp &&
      mv requirements.txt.tmp requirements.txt;
  fi;
# ---------------------------------------------------------------------------
# Use a space separated list of package names as PACKAGES_TO_CONDA
- PACKAGES_TO_CONDA="Cython h5py";
# Now we automatically search for these package settings in requirements.txt,
# collect their version constraints, and strip them out of requirements.txt
# so they are installed by conda rather than pip.
- OTHER_CONDA_REQUIREMENTS="";
  if [ -f requirements.txt ]; then
    while read -r PKG_NAME; do
      THIS_REQUIREMENT="$(grep "^$PKG_NAME\([!<>=~ ]\|$\)" requirements.txt)";
      OTHER_CONDA_REQUIREMENTS+=" $THIS_REQUIREMENT";
      sed -i "/^$PKG_NAME\([!<>=~ ]\|$\)/d" requirements.txt;
    done < <(echo "$PACKAGES_TO_CONDA" | tr ' ' '\n');
  fi;
  echo $OTHER_CONDA_REQUIREMENTS;
# ---------------------------------------------------------------------------
# Update the package list
- travis_retry sudo apt-get update
- travis_retry sudo apt-get install python3-dev libevent-dev
# Install numpy/scipy dependencies with apt-get. We want ATLAS and LAPACK.
- if [[ "$NUMPY_REQUIREMENT" != "" ]] || [[ "$SCIPY_REQUIREMENT" != "" ]]; then
    travis_retry sudo apt-get install -y libatlas-dev libatlas-base-dev;
  fi;
  if [[ "$SCIPY_REQUIREMENT" != "" ]]; then
    travis_retry sudo apt-get install -y liblapack-dev;
  fi;
# ---------------------------------------------------------------------------
# If we want to run the tests using the oldest set of dependencies we
# support, modify any *requirements*.txt files so every '>=' becomes '~='
# (compatible-release pin to the oldest supported version).
- if [[ "$USE_OLDEST_DEPENDENCIES" == "true" ]]; then
    for FILE in *requirements*.txt; do
      sed -e 's/>=/~=/g' $FILE > $FILE.tmp && mv $FILE.tmp $FILE;
    done;
  fi;
# ---------------------------------------------------------------------------
# The following is based on Miniconda's how-to Travis page
# http://conda.pydata.org/docs/travis.html
# ---------------------------------------------------------------------------
# Download miniconda. Only do this if the cached file isn't present.
# NOTE(review): repo.continuum.io is deprecated; installers are served from
# repo.anaconda.com, and the legacy Python-2 "Miniconda-latest" installer is
# no longer maintained, so use Miniconda3. The base environment's Python
# version does not matter since testenv is created with an explicit version
# below. If you have a cached copy of the old installer, wipe the cache.
- mkdir -p $HOME/Downloads;
  if [ ! -f $HOME/Downloads/miniconda.sh ]; then
    travis_retry wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O "$HOME/Downloads/miniconda.sh";
  fi;
# Install miniconda to the home directory, if it isn't there already.
- if [ ! -d "$HOME/miniconda/bin" ]; then
    if [ -d "$HOME/miniconda" ]; then rm -r "$HOME/miniconda"; fi;
    bash $HOME/Downloads/miniconda.sh -b -p "$HOME/miniconda";
  fi;
- ls -alh "$HOME/miniconda";
# Add conda to the path and automatically say yes to any check from conda
- export PATH="$HOME/miniconda/bin:$PATH";
  hash -r;
  conda config --set always_yes yes --set changeps1 no
# Remove test environment from conda, if it's still there from last time
- conda remove -n testenv --all || echo "No 'testenv' environment to remove";
# Update conda
- travis_retry conda update -q conda
# Useful for debugging any issues with conda
- conda info -a
- conda list
#
# If necessary, check which is the earliest version of numpy and scipy
# available on conda for this version of python.
# Because any given version of scipy is only available for a narrow range
# of numpy versions, we constrain only scipy and not numpy to its oldest
# possible requirement when scipy is being installed. The version of numpy
# we end up with must still satisfy the original requirement.txt setting, and
# be from around the time of the oldest supported scipy release.
- if [[ "$USE_OLDEST_DEPENDENCIES" == "true" ]]; then
    if [[ "$SCIPY_REQUIREMENT" != "" ]]; then
      SCIPY_REQUIREMENT="scipy==$(bash
        ./continuous_integration/conda_min_version.sh
        "$SCIPY_REQUIREMENT" "$TRAVIS_PYTHON_VERSION")";
    elif [[ "$NUMPY_REQUIREMENT" != "" ]]; then
      NUMPY_REQUIREMENT="numpy==$(bash
        ./continuous_integration/conda_min_version.sh
        "$NUMPY_REQUIREMENT" "$TRAVIS_PYTHON_VERSION")";
    fi;
  fi;
# Create the conda environment with pip, numpy and scipy installed (if they
# are in requirements.txt)
- conda create -q -n testenv python=$TRAVIS_PYTHON_VERSION
    pip $NUMPY_REQUIREMENT $SCIPY_REQUIREMENT $OTHER_CONDA_REQUIREMENTS
# If you get an error from this command which looks like this:
#   Error: Unsatisfiable package specifications.
#   Generating hint:
#   [ COMPLETE ]|###########| 100%
#   Hint: the following packages conflict with each other:
#     - numpy >=1.9.0
#     - scipy ==0.12.0
#
# This is because you have constrained the numpy version in requirements.txt
# to a more recent set of values (e.g. numpy>=1.9.0) than the scipy
# constraint (e.g. scipy>=0.12.0). The USE_OLDEST_DEPENDENCIES code has
# looked up the oldest compatible version available on conda (scipy==0.12.0)
# but there is no numpy version for this which matches your constraint.
#
# You can resolve this by doing a search of the conda packages available
#   conda search scipy
# and changing your scipy constraint to be scipy>=x.y.z, where x.y.z is the
# oldest version which has a matching numpy version in its buildstring.
# To resolve the example, we look for the first scipy version which has
# 'np19' in its buildstring, and find it is scipy version 0.14.0, so we
# update the requirements.txt file to have 'scipy>=0.14.0' instead of
# 'scipy>=0.12.0'.
#
# Activate the test environment
- source activate testenv
###############################################################################
install:
# Install required packages listed in requirements.txt. We install this
# with the --upgrade flag *and* with --no-deps to make sure we have the
# most up to date version of all *immediate* dependencies of the package
# we are developing (whilst still compatible with the version specifier),
# without upgrading recursively (since that would invariably involve
# upgrading numpy and/or scipy). We then need to make sure the dependency
# "chain" is satisfied (dependencies of dependencies are adequate) without
# upgrading unnecessarily.
- if [ -f requirements.txt ]; then
    cat requirements.txt;
    pip install --no-deps --upgrade -r requirements.txt;
    pip install -r requirements.txt;
  fi;
# Also install any developmental requirements, if present.
- if [ -f requirements-dev.txt ]; then
    cat requirements-dev.txt;
    pip install --no-deps --upgrade -r requirements-dev.txt;
    pip install -r requirements-dev.txt;
  fi;
- if [ -f requirements-test.txt ]; then
    cat requirements-test.txt;
    pip install --no-deps --upgrade -r requirements-test.txt;
    pip install -r requirements-test.txt;
  fi;
# ---------------------------------------------------------------------------
# Now install your own package, e.g.
# - python setup.py install
###############################################################################
before_script:
# Double-check we are still in the right directory
- pwd
# Check what python packages we have installed
- conda info -a
- which python
- python --version
- conda env export > environment.yml && cat environment.yml
- pip freeze
# ---------------------------------------------------------------------------
# Remove any cached results files from previous build, if present
- rm -f testresults.xml;
  rm -f coverage.xml;
  rm -f .coverage;
# ---------------------------------------------------------------------------
# Set up folders for test results on Shippable
- if [ "$SHIPPABLE" = "true" ]; then
    rm -fr shippable;
    mkdir -p shippable/testresults;
    mkdir -p shippable/codecoverage;
  fi;
###############################################################################
script:
# Have to run the tests from the `testing` folder
- cd testing
# Report the python/numpy/scipy versions actually in use before testing
- python --version;
  if [[ "$NUMPY_REQUIREMENT" != "" ]]; then
    python -c "import numpy; print('numpy %s' % numpy.__version__)";
  fi;
  if [[ "$SCIPY_REQUIREMENT" != "" ]]; then
    python -c "import scipy; print('scipy %s' % scipy.__version__)";
  fi;
# ---------------------------------------------------------------------------
#- py.test --flake8 --cov=../python --cov-report term --cov-report xml --cov-config .coveragerc --junitxml=testresults.xml
- py.test --junitxml=testresults.xml
###############################################################################
after_script:
# Show where we ended up
- pwd
# Go back to the testing directory, just in case
# Show what results files there are
- cd "${REPOPATH}/testing" && ls -alh;
# ---------------------------------------------------------------------------
# Move results and coverage files into appropriate places
- if [ "$SHIPPABLE" = "true" ] && [ -f testresults.xml ]; then
    mv testresults.xml shippable/testresults/;
  fi;
  if [ "$SHIPPABLE" = "true" ] && [ -f coverage.xml ]; then
    cp coverage.xml shippable/codecoverage/;
  fi;
###############################################################################
after_success:
# Only run coveralls on Travis. When running on a public Travis-CI, the
# repo token is automatically inferred, but to run coveralls on Shippable
# the repo token needs to be specified in a .coveralls.yml or as an
# environment variable COVERALLS_REPO_TOKEN. This should be kept hidden
# from public viewing, either by encrypting the token or running on a
# private build.
# We ignore coveralls failures because the coveralls server is not 100%
# reliable and we don't want the CI to report a failure just because the
# coverage report wasn't published.
- if [ "$TRAVIS" = "true" ] && [ "$SHIPPABLE" != "true" ]; then
    pip install coveralls;
    travis_retry coveralls || echo "Coveralls push failed";
    pip install codecov;
    travis_retry codecov || echo "Codecov push failed";
  fi;
###############################################################################
# Steps to take before archiving on Shippable (does nothing on Travis)
before_archive:
# Have shippable archive the environment.yml artifact by putting it in
# the REPO/shippable folder. This is available to download as a tar file for
# each build.
# Since this build was successful, you can share it for users to install from
# with the command `conda env create -f environment.yml` and know they have
# a working build.
# If you want to save this file on Travis, you will need to turn on the
# artifacts addon (or do something else with it). See here for details
# https://docs.travis-ci.com/user/uploading-artifacts/
- if [ "$SHIPPABLE" = "true" ] && [ -f environment.yml ]; then
    cp environment.yml shippable/;
  fi;
###############################################################################
# Enable archiving of artifacts on Shippable (does nothing on Travis)
archive: true