# azure-pipelines.yml (forked from JuliaPackaging/Yggdrasil)
# Trigger on pushes to `master`
trigger:
- master
# Trigger on PRs against `master`
pr:
- master
# By default, use the `Native` pool of agents
pool: Native
variables:
  JULIA: unbuffer julia --project=$(Build.SourcesDirectory)/.ci --color=yes
  # We limit our parallelism somewhat in order to avoid strange OOM errors while building LLVM
  BINARYBUILDER_NPROC: 64
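# Note: $(JULIA) and the other $(...) references below use Azure Pipelines macro syntax;
# they are substituted into each script before it runs, so every `$(JULIA) ...` invocation
# expands to the `unbuffer julia --project=$(Build.SourcesDirectory)/.ci --color=yes` command above.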
jobs:
- job: generator
  steps:
  - checkout: self
    fetchDepth: 99999
    clean: true
  - bash: |
      # Be fragile, like a beautiful porcelain doll
      set -e
      # Normally we look at the last pushed commit
      COMPARE_AGAINST="HEAD~1"
      # Keyword to be used in the commit message to skip a rebuild
      SKIP_BUILD_COOKIE="[skip build]"
      # This variable will tell us whether we want to skip the build
      export SKIP_BUILD="false"
      if [[ $(Build.Reason) == "PullRequest" ]]; then
          # If we're on a PR though, we look at the entire branch at once
          TARGET_BRANCH="remotes/origin/$(System.PullRequest.TargetBranch)"
          COMPARE_AGAINST=$(git merge-base --fork-point ${TARGET_BRANCH} HEAD)
          git fetch origin "refs/pull/$(System.PullRequest.PullRequestNumber)/head:refs/remotes/origin/pr/$(System.PullRequest.PullRequestNumber)"
          if [[ "$(git show -s --format=%B origin/pr/$(System.PullRequest.PullRequestNumber))" == *"${SKIP_BUILD_COOKIE}"* ]]; then
              SKIP_BUILD="true"
          fi
      else
          if [[ "$(git show -s --format=%B)" == *"${SKIP_BUILD_COOKIE}"* ]]; then
              SKIP_BUILD="true"
          fi
      fi
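      # For example: if the PR's head commit message contains "[skip build]" (the
      # SKIP_BUILD_COOKIE above), SKIP_BUILD flips to "true" and the per-platform
      # builds are skipped further down, while the variable plumbing still runs.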
      $(JULIA) -e "using InteractiveUtils; versioninfo()"
      # Get the directories holding changed files
      # 1. All changed files
      # 2. Only files in directories
      # 3. dirname
      # 4. Unique the directories
      PROJECTS=$(git diff-tree --no-commit-id --name-only -r HEAD "${COMPARE_AGAINST}" | grep -E ".+/.+" | sed 's#/[^/]*$##' | sort -u)
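      # As an illustration (hypothetical paths): a commit touching "Z/Zstd/build_tarballs.jl"
      # contributes "Z/Zstd" here, while a deeper file such as "Z/Zstd/bundled/patches/foo.patch"
      # contributes "Z/Zstd/bundled/patches", which the loop below walks back up to "Z/Zstd".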
      # If there are scary projects we need to exclude, we list them here. (Used to contain `LLVM`)
      EXCLUDED_NAMES=" "
      # This is the dynamic mapping we're going to build up; if it's empty, we don't do anything
      PROJECTS_ACCEPTED=()
      for PROJECT in ${PROJECTS}; do
          NAME=$(basename "${PROJECT}")
          echo "Considering ${PROJECT}"
          # Only accept things that contain a `build_tarballs.jl`
          while [[ ! -f "${PROJECT}/build_tarballs.jl" ]] && [[ "${PROJECT}" == */* ]]; do
              echo " --> ${PROJECT} does not contain a build_tarballs.jl, moving up a directory"
              PROJECT="$(dirname "${PROJECT}")"
          done
          if [[ "${PROJECT}" != */* ]]; then
              echo " --> Skipping as we could not find a build_tarballs.jl"
              continue
          fi
          # Ignore RootFS stuff, we'll do that manually
          if [[ "${PROJECT}" == "0_RootFS/"* ]]; then
              echo " --> Skipping as it's within 0_RootFS/"
              continue
          fi
          # Ignore stuff in our excluded projects
          if [[ "${EXCLUDED_NAMES}" == *" ${NAME} "* ]]; then
              echo " --> Skipping as it's excluded"
              continue
          fi
          # Otherwise, emit a build with `PROJECT` set to `${PROJECT}`
          if [[ " ${PROJECTS_ACCEPTED[@]} " =~ " ${PROJECT} " ]]; then
              echo " --> Already in accepted projects, skipping"
          else
              echo " --> Accepted!"
              PROJECTS_ACCEPTED+=("${PROJECT}")
          fi
      done
      if [[ -n "${PROJECTS_ACCEPTED[@]}" ]]; then
          if [[ ${#PROJECTS_ACCEPTED[@]} -gt 20 ]]; then
              echo "Too many projects requested"
              exit 1
          fi
          # Next, we're going to ensure that our BB is up to date and precompiled
          $(JULIA) -e "import Pkg; Pkg.instantiate(); Pkg.precompile()"
          # We're going to snarf out the BB and BBB tree hashes and combine them to be used later in our build cache
          BB_HASH=$($(JULIA) -e "using Pkg, SHA; \
              gethash(uuid) = collect(Pkg.Types.Context().env.manifest[Pkg.Types.UUID(uuid)].tree_hash.bytes); \
              print(bytes2hex(sha256(vcat( \
                  gethash(\"7f725544-6523-48cd-82d1-3fa08ff4056e\"), \
                  gethash(\"12aac903-9f7c-5d81-afc2-d9565ea332ae\"), \
              ))));")
          # Next, for each project, download its sources. We do this by generating meta.json
          # files, then parsing them with `download_sources.jl`
          for PROJECT in "${PROJECTS_ACCEPTED[@]}"; do
              NAME=$(basename ${PROJECT})
              # We always invoke a `build_tarballs.jl` file from its own directory
              pushd ${PROJECT} >/dev/null
              echo "Generating meta.json..."
              JSON_PATH="$(Agent.TempDirectory)/${NAME}.meta.json"
              $(JULIA) --compile=min ./build_tarballs.jl --meta-json="${JSON_PATH}"
              echo "Downloading sources..."
              $(JULIA) $(Build.SourcesDirectory)/.ci/download_sources.jl "${JSON_PATH}" $(Agent.TempDirectory)/${NAME}.platforms.list
              # Pop back up to the overworld
              popd >/dev/null
          done
          # Emit project variable declarations
          for PROJECT in "${PROJECTS_ACCEPTED[@]}"; do
              NAME=$(basename ${PROJECT})
          done
          # Emit project/platform joint variable declarations
          VAR_PROJECTS="##vso[task.setVariable variable=projects;isOutput=true]{"
          VAR_PROJPLATFORMS="##vso[task.setVariable variable=projplatforms;isOutput=true]{"
          echo "Determining builds to queue..."
          for PROJECT in "${PROJECTS_ACCEPTED[@]}"; do
              NAME=$(basename ${PROJECT})
              # "project source hash" is a combination of meta.json (to absorb
              # changes from include()'ing a `common.jl`) as well as the entire
              # tree the project lives in (to absorb changes from patches)
              TREE_HASH=$($(JULIA) -e "using Pkg; print(bytes2hex(Pkg.GitTools.tree_hash(\"${PROJECT}\")))")
              META_HASH=$(shasum -a 256 "$(Agent.TempDirectory)/${NAME}.meta.json" | cut -d' ' -f1)
              PROJ_HASH=$(echo -n ${TREE_HASH}${META_HASH} | shasum -a 256 | cut -d' ' -f1)
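              # In other words, PROJ_HASH is the sha256 of TREE_HASH followed by META_HASH (as hex
              # strings), so touching any file under ${PROJECT} or changing the generated meta.json
              # yields a new cache key for this project.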
              # Load in the platforms
              PLATFORMS=$(cat $(Agent.TempDirectory)/${NAME}.platforms.list)
              if [[ -z "${PLATFORMS}" ]]; then
                  echo "##vso[task.logissue type=error]Unable to determine the proper platforms for ${NAME}"
                  continue
              fi
              # That's everything we need to know for `$(PROJECTS)` later on down
              VAR_PROJECTS="${VAR_PROJECTS} '${NAME}':{ \
                  'NAME': '${NAME}', \
                  'PROJECT':'${PROJECT}', \
                  'PLATFORMS':'${PLATFORMS}', \
                  'BB_HASH':'${BB_HASH}', \
                  'PROJ_HASH':'${PROJ_HASH}', \
                  'SKIP_BUILD':'${SKIP_BUILD}' \
              }, "
              # Some debugging info
              echo " ---> ${NAME}: ${BB_HASH}/${PROJ_HASH} (${TREE_HASH} + ${META_HASH})"
              # For `$(PROJPLATFORMS)`, we need to know more...
              for PLATFORM in ${PLATFORMS}; do
                  if [[ "${SKIP_BUILD}" == "true" ]]; then
                      echo "The commit message contains ${SKIP_BUILD_COOKIE}, skipping build"
                      break
                  fi
                  # Here, we hit the build cache to see if we can skip this particular combo
                  CACHE_URL="https://julia-bb-buildcache.s3.amazonaws.com/${BB_HASH}/${PROJ_HASH}/${PLATFORM}.tar.gz"
                  CURL_HTTP_CODE=$(curl --output /tmp/curl_${PROJ_HASH}_${PLATFORM}.log --silent --include --HEAD "${CACHE_URL}" --write-out '%{http_code}')
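                  # The probe above is an HTTP HEAD request against the build cache; curl's
                  # --write-out '%{http_code}' leaves only the status code in CURL_HTTP_CODE,
                  # while the response headers land in the /tmp/curl_*.log file for debugging.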
if [[ "${CURL_HTTP_CODE}" == "200" ]]; then
echo " ${PLATFORM}: skipping, existant"
continue;
fi
echo " ${PLATFORM}: building"
# # Debugging: let's see why `curl` failed:
# echo "CACHE_URL: ${CACHE_URL}"
# cat /tmp/curl_${PROJ_HASH}_${PLATFORM}.log || true
# Otherwise, emit the build
VAR_PROJPLATFORMS="${VAR_PROJPLATFORMS} '${NAME}-${PLATFORM}':{ \
'NAME': '${NAME}', \
'PROJECT':'${PROJECT}', \
'PLATFORM':'${PLATFORM}', \
'PROJ_HASH':'${PROJ_HASH}', \
'BB_HASH':'${BB_HASH}' \
}, "
done
rm -f /tmp/curl_${PROJ_HASH}*.log
done
# Add closing parens
VAR_PROJECTS="${VAR_PROJECTS} }"
VAR_PROJPLATFORMS="${VAR_PROJPLATFORMS} }"
# Actually output the variables
echo "${VAR_PROJECTS}"
echo "${VAR_PROJPLATFORMS}"
      fi
    env:
      GITHUB_TOKEN: $(GITHUB_TOKEN)
    name: mtrx
- job: jll_init
  dependsOn: generator
  timeoutInMinutes: 10
  cancelTimeoutInMinutes: 2
  strategy:
    matrix: $[ dependencies.generator.outputs['mtrx.projects'] ]
  variables:
    projects: $[ dependencies.generator.outputs['mtrx.projects'] ]
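  # Each entry of the `projects` matrix map becomes one jll_init job, and that entry's keys
  # (NAME, PROJECT, PLATFORMS, BB_HASH, PROJ_HASH, SKIP_BUILD) are exposed to the steps below
  # as job variables, e.g. $(PROJECT) and $(NAME).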
  steps:
  - script: |
      # Fail on error
      set -e
      cd $(PROJECT)
      echo "Generating meta.json..."
      $(JULIA) --compile=min ./build_tarballs.jl --meta-json=$(Agent.TempDirectory)/$(NAME).meta.json
      echo "Initializing JLL package..."
      $(JULIA) $(Build.SourcesDirectory)/.ci/jll_init.jl "$(Agent.TempDirectory)/${NAME}.meta.json"
    env:
      GITHUB_TOKEN: $(GITHUB_TOKEN)
    displayName: "initialize JLL package"
    condition: and(and(ne(variables['Build.Reason'], 'PullRequest'), eq(variables['Build.SourceBranch'], 'refs/heads/master')), ne(variables['projects'], ''))
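  # The condition above means: only initialize new JLL repositories for pushes to `master`
  # (never for PRs), and only when the generator actually emitted a non-empty `projects` matrix.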
- job: build
  dependsOn:
  - generator
  - jll_init
  timeoutInMinutes: 240
  cancelTimeoutInMinutes: 2
  strategy:
    matrix: $[ dependencies.generator.outputs['mtrx.projplatforms'] ]
  variables:
    projplatforms: $[ dependencies.generator.outputs['mtrx.projplatforms'] ]
  steps:
  - script: |
      # Fail on error
      set -e
      # Cleanup temporary things that might have been left-over
      ./clean_builds.sh
      ./clean_products.sh
      cd $(PROJECT)
      $(JULIA) ./build_tarballs.jl --verbose $(PLATFORM)
      # After building, we take the single tarball produced with the proper NAME, and upload it:
      TARBALLS=( ./products/${NAME%@*}*${PLATFORM}*.tar.gz )
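      # ${NAME%@*} strips any "@<version>" suffix from the project name, so a matrix entry
      # named e.g. "Zlib@1.2.13" (illustrative only) globs for ./products/Zlib*<platform>*.tar.gz.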
if [[ "${#TARBALLS[@]}" != 1 ]]; then
echo "Multiple tarballs? This isn't right!" >&2
exit 1
fi
# Upload with curl
ACL="x-amz-acl:public-read"
CONTENT_TYPE="application/x-gtar"
BUCKET="julia-bb-buildcache"
BUCKET_PATH="${BB_HASH}/${PROJ_HASH}/${PLATFORM}.tar.gz"
DATE="$(date -R)"
S3SIGNATURE=$(echo -en "PUT\n\n${CONTENT_TYPE}\n${DATE}\n${ACL}\n/${BUCKET}/${BUCKET_PATH}" | openssl sha1 -hmac "${S3SECRET}" -binary | base64)
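      # This is a hand-rolled AWS Signature Version 2 request signature: an HMAC-SHA1 (keyed
      # with S3SECRET) over the string-to-sign "PUT\n\n<content-type>\n<date>\n<amz header>\n/<bucket>/<key>",
      # base64-encoded and sent in the Authorization header below.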
HOST="${BUCKET}.s3.amazonaws.com"
echo "Uploading artifact to https://${HOST}/${BUCKET_PATH}"
curl -X PUT -T "${TARBALLS[0]}" \
-H "Host: ${HOST}" \
-H "Date: ${DATE}" \
-H "Content-Type: ${CONTENT_TYPE}" \
-H "${ACL}" \
-H "Authorization: AWS ${S3KEY}:${S3SIGNATURE}" \
"https://${HOST}/${BUCKET_PATH}"
if [[ "$?" != 0 ]]; then
echo "Failed to upload artifact!" >&2
exit 1
fi
env:
GITHUB_TOKEN: $(GITHUB_TOKEN)
S3KEY: $(S3KEY)
S3SECRET: $(S3SECRET)
BINARYBUILDER_NPROC: $(BINARYBUILDER_NPROC)
displayName: "run build_tarballs.jl"
condition: and(ne(variables['projplatforms'], ''), ne(variables['projplatforms'], '{ }'))
- job: register
  dependsOn:
  - generator
  - build
  strategy:
    matrix: $[ dependencies.generator.outputs['mtrx.projects'] ]
    maxParallel: 1
  variables:
    projects: $[ dependencies.generator.outputs['mtrx.projects'] ]
  steps:
  - script: |
      # Fail on error
      set -e
      cd $(PROJECT)
      echo "Generating meta.json..."
      $(JULIA) --compile=min ./build_tarballs.jl --meta-json=$(Agent.TempDirectory)/$(NAME).meta.json
      echo "Registering $(NAME)..."
      export BB_HASH PROJ_HASH
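      # BB_HASH and PROJ_HASH arrive as environment variables from this job's matrix entry;
      # exporting them ensures they are visible to register_package.jl, presumably so it can
      # locate the tarballs the build job uploaded to the build cache under those keys.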
      $(JULIA) $(Build.SourcesDirectory)/.ci/register_package.jl $(Agent.TempDirectory)/$(NAME).meta.json --verbose
    env:
      GITHUB_TOKEN: $(GITHUB_TOKEN)
    displayName: "register JLL package"
    # We only register on pushes to `master`; this is the same condition used by the `jll_init` job above.
    condition: and(and(ne(variables['Build.Reason'], 'PullRequest'), eq(variables['Build.SourceBranch'], 'refs/heads/master')), ne(variables['projects'], ''))