#!/bin/bash
set -e
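
# Usage: ./ci.bash <version>
# Assembles each Kinesis app's fatjar, zips the apps individually and as a combined
# meta archive, and publishes the zips to the snowplow-generic BinTray repository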
# Constants
bintray_package=snowplow
bintray_artifact_prefix=snowplow_kinesis_
bintray_user=$BINTRAY_SNOWPLOW_GENERIC_USER
bintray_api_key=$BINTRAY_SNOWPLOW_GENERIC_API_KEY
bintray_repository=snowplow/snowplow-generic
scala_version=2.10
dist_path=dist
root=$(pwd)

# Next four arrays MUST match up: number of elements and order
declare -a kinesis_app_packages=( "snowplow-scala-stream-collector" "snowplow-stream-enrich" "snowplow-kinesis-elasticsearch-sink" )
declare -a kinesis_app_paths=( "2-collectors/scala-stream-collector" "3-enrich/stream-enrich" "4-storage/kinesis-elasticsearch-sink" )
declare -a kinesis_fatjars=( "snowplow-stream-collector" "snowplow-stream-enrich" "snowplow-elasticsearch-sink" )
# TODO: version numbers shouldn't be hard-coded
declare -a kinesis_app_versions=( "0.7.0" "0.8.1" "0.7.0" )
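# e.g. index 0 lines up as: package snowplow-scala-stream-collector,
#      path 2-collectors/scala-stream-collector, fatjar snowplow-stream-collector-0.7.0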

# Similar to Perl die
function die() {
  echo "$@" 1>&2 ; exit 1;
}

# Return to the directory the script was launched from (the repository root)
function cd_root() {
  cd "${root}"
}

# Assemble our fat jars
function assemble_fatjars() {
  for kinesis_app_path in "${kinesis_app_paths[@]}"
  do
    app="${kinesis_app_path##*/}"
    echo "================================================"
    echo "ASSEMBLING FATJAR FOR ${app}"
    echo "------------------------------------------------"
    cd ${kinesis_app_path} && sbt assembly
    cd_root
  done
}

# Creates BinTray versions
#
# Parameters:
# 1. package_names
# 2. package_versions
# 3. out_error (out parameter)
function create_bintray_versions() {
  [ "$#" -eq 3 ] || die "3 arguments required, $# provided"
  local __package_names=$1[@]
  local __package_versions=$2[@]
  local __out_error=$3
  package_names=("${!__package_names}")
  package_versions=("${!__package_versions}")

  for i in "${!package_names[@]}"
  do
    package_name="${package_names[$i]}"
    package_version="${package_versions[$i]}"
    echo "========================================"
    echo "CREATING BINTRAY VERSION ${package_version} in package ${package_name}*"
    echo "* if it doesn't already exist"
    echo "----------------------------------------"
    http_status=`echo '{"name":"'${package_version}'","desc":"Release of '${package_name}'"}' | curl -d @- \
      "https://api.bintray.com/packages/${bintray_repository}/${package_name}/versions" \
      --write-out "%{http_code}\n" --silent --output /dev/null \
      --header "Content-Type:application/json" \
      -u${bintray_user}:${bintray_api_key}`

    http_status_class=${http_status:0:1}
    ok_classes=("2" "3")

    if [ ${http_status} == "409" ] ; then
      echo "... version ${package_version} in package ${package_name} already exists, skipping."
    elif [[ ! ${ok_classes[*]} =~ ${http_status_class} ]] ; then
      eval ${__out_error}="'BinTray API response ${http_status} is not 409 (package already exists) nor in 2xx or 3xx range'"
      break
    fi
  done
}
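
# NB: the functions in this script pass arrays by name: callers supply bare
# variable names (no "$"), which are expanded with ${!name[@]} for inputs and
# written back with eval for out-parameters, e.g.:
#
#   create_bintray_versions "package_names" "package_versions" "error"
#   [ "${error}" ] && die "Error creating version: ${error}"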

# Zips all of our applications into a meta zip
#
# Parameters:
# 1. artifact_version
# 2. out_artifact_name (out parameter)
# 3. out_artifact_path (out parameter)
function build_meta_artifact() {
  [ "$#" -eq 3 ] || die "3 arguments required, $# provided"
  local __artifact_version=$1
  local __out_artifact_name=$2
  local __out_artifact_path=$3

  artifact_root="${bintray_artifact_prefix}${__artifact_version}"
  meta_artifact_name=`echo ${artifact_root}.zip|tr '-' '_'`
  echo "==========================================="
  echo "BUILDING ARTIFACT ${meta_artifact_name}"
  echo "-------------------------------------------"

  artifact_folder=./${dist_path}/${artifact_root}
  mkdir -p ${artifact_folder}

  for i in "${!kinesis_app_paths[@]}"
  do
    kinesis_fatjar="${kinesis_fatjars[$i]}-${kinesis_app_versions[$i]}"
    fatjar_path="./${kinesis_app_paths[$i]}/target/scala-${scala_version}/${kinesis_fatjar}"
    [ -f "${fatjar_path}" ] || die "Cannot find required fatjar: ${fatjar_path}. Did you forget to update fatjar versions?"
    cp ${fatjar_path} ${artifact_folder}
  done

  meta_artifact_path=./${dist_path}/${meta_artifact_name}
  zip -rj ${meta_artifact_path} ${artifact_folder}

  eval ${__out_artifact_name}=${meta_artifact_name}
  eval ${__out_artifact_path}=${meta_artifact_path}
}
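
# For illustration: with bintray_artifact_prefix=snowplow_kinesis_ and a version
# of, say, 0.9.0, the meta artifact would land at ./dist/snowplow_kinesis_0.9.0.zip
# (any dashes in the name are translated to underscores)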

# Zips all of the individual applications
#
# Parameters:
# 1. out_artifact_names (out parameter)
# 2. out_artifact_paths (out parameter)
#
# NB: results are appended directly to the arrays named by the caller, so the
# caller must pass the names "single_artifact_names" and "single_artifact_paths"
function build_single_artifacts() {
  [ "$#" -eq 2 ] || die "2 arguments required, $# provided"
  local __out_artifact_names=$1
  local __out_artifact_paths=$2

  single_artifact_names=()
  single_artifact_paths=()

  for i in "${!kinesis_app_paths[@]}"
  do
    kinesis_fatjar="${kinesis_fatjars[$i]}-${kinesis_app_versions[$i]}"

    # Create artifact folder
    artifact_root="${kinesis_fatjar}"
    artifact_name=`echo ${kinesis_fatjar}.zip|tr '-' '_'`
    echo "==========================================="
    echo "BUILDING ARTIFACT ${artifact_name}"
    echo "-------------------------------------------"
    artifact_folder=./${dist_path}/${artifact_root}
    mkdir -p ${artifact_folder}

    # Copy artifact to folder
    fatjar_path="./${kinesis_app_paths[$i]}/target/scala-${scala_version}/${kinesis_fatjar}"
    [ -f "${fatjar_path}" ] || die "Cannot find required fatjar: ${fatjar_path}. Did you forget to update fatjar versions?"
    cp ${fatjar_path} ${artifact_folder}

    # Zip artifact
    artifact_path=./${dist_path}/${artifact_name}
    zip -rj ${artifact_path} ${artifact_folder}

    single_artifact_names+=("${artifact_name}")
    single_artifact_paths+=("${artifact_path}")
  done
}

# Uploads separate artifacts to BinTray
#
# Parameters:
# 1. artifact_names
# 2. artifact_paths
# 3. package_names
# 4. package_versions
# 5. out_error (out parameter)
function upload_artifacts_to_bintray() {
  [ "$#" -eq 5 ] || die "5 arguments required, $# provided"
  local __artifact_names=$1[@]
  local __artifact_paths=$2[@]
  local __package_names=$3[@]
  local __package_versions=$4[@]
  local __out_error=$5
  _artifact_names=("${!__artifact_names}")
  _artifact_paths=("${!__artifact_paths}")
  _package_names=("${!__package_names}")
  _package_versions=("${!__package_versions}")

  echo "==============================="
  echo "UPLOADING ARTIFACTS TO BINTRAY*"
  echo "* 5-10 minutes"
  echo "-------------------------------"

  for i in "${!_artifact_names[@]}"
  do
    artifact_name="${_artifact_names[$i]}"
    artifact_path="${_artifact_paths[$i]}"
    package_name="${_package_names[$i]}"
    package_version="${_package_versions[$i]}"
    echo "Uploading ${artifact_name} to package ${package_name} under version ${package_version}..."

    # Check if version file already exists (the inline check below expects `python` to be Python 2)
    uploaded_files=`curl \
      "https://api.bintray.com/packages/${bintray_repository}/${package_name}/versions/${package_version}/files/" \
      --silent \
      -u${bintray_user}:${bintray_api_key}`
    uploaded_file_exists=`echo ${uploaded_files} | python -c \
      "exec(\"import sys,json\\nobj=json.load(sys.stdin)\\nfor item in obj:\\n if '${artifact_name}' == item['name']: print 'true'; break\")"`

    # If file exists within version, skip
    if [ "${uploaded_file_exists}" == "true" ] ; then
      echo "... file already exists in version, skipping."
      continue
    fi

    # If file not yet uploaded
    http_status=`curl -T ${artifact_path} \
      "https://api.bintray.com/content/${bintray_repository}/${package_name}/${package_version}/${artifact_name}?publish=1&override=0" \
      -H "Transfer-Encoding: chunked" \
      --write-out "%{http_code}\n" --silent --output /dev/null \
      -u${bintray_user}:${bintray_api_key}`

    http_status_class=${http_status:0:1}
    ok_classes=("2" "3")

    if [[ ! ${ok_classes[*]} =~ ${http_status_class} ]] ; then
      eval ${__out_error}="'BinTray API response ${http_status} is not in 2xx or 3xx range'"
      break
    fi
  done
}

cd_root

version=$1
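# Sanity check: a release version must be supplied as the first argument
[ "${version}" ] || die "Usage: $0 version"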

assemble_fatjars

# Create (or reuse) BinTray versions for each app package plus the meta snowplow package
package_names=( "${kinesis_app_packages[@]}" "${bintray_package}" )
package_versions=( "${kinesis_app_versions[@]}" "${version}" )
create_bintray_versions "package_names" "package_versions" "error"
[ "${error}" ] && die "Error creating version: ${error}"

# Build the per-app zips and the combined meta zip
single_artifact_names=()
single_artifact_paths=()
build_single_artifacts "single_artifact_names" "single_artifact_paths"

meta_artifact_name=""
meta_artifact_path=""
build_meta_artifact "${version}" "meta_artifact_name" "meta_artifact_path"

# Upload everything to BinTray
artifact_names=( "${single_artifact_names[@]}" "${meta_artifact_name}" )
artifact_paths=( "${single_artifact_paths[@]}" "${meta_artifact_path}" )
upload_artifacts_to_bintray "artifact_names" "artifact_paths" "package_names" "package_versions" "error"
if [ "${error}" != "" ]; then
  die "Error uploading package: ${error}"
fi