-
Notifications
You must be signed in to change notification settings - Fork 0
/
bitbucket-pipelines.yml
121 lines (119 loc) · 5.87 KB
/
bitbucket-pipelines.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
image: node:16.18.0

definitions:
  steps:
    # Reusable step: build all portal flavours and push them via git-ftp to the
    # test.geoportal-hamburg.de server (referenced by the custom e2e pipeline).
    - step: &install-e2e-tests-server-on-test-geoportal
        name: install masterportals master, basic etc. on test.geoportal
        caches:
          - node
        artifacts:
          - dist/**
        script:
          - export FTP_USERNAME="$LGV_TEST_FTP_USERNAME"
          - export FTP_PASSWORD="$LGV_TEST_FTP_PASSWORD"
          - export FTP_SOURCE_PATH="dist"
          - export FTP_TARGET_URL="ftpes://test.geoportal-hamburg.de/"
          - export FTP_GITLOGFILE="testGitLog"
          # replace slashes in branch name with underscores
          - tmp=$BITBUCKET_BRANCH
          - branchname=${tmp//[\/]/_}
          - export FTP_UNTRACKED_PATH=$'!dist/master_'$branchname$'\n!dist/basic_'$branchname$'\n!dist/masterCustom_'$branchname$'\n!dist/masterDefault_'$branchname$'\n!dist/mastercode'
          - git clone git@bitbucket.org:geowerkstatt-hamburg/ci_scripts.git
          - mv ./ci_scripts/deployScripts ./deployScripts # move deployScripts to the masterportal root
          - rm -rf ./ci_scripts
          - rm -rf ./addons/readme.txt
          # "git-ftp" only transfers committed changes, so every change in the repository has to be committed, see https://git-ftp.github.io/
          - git commit ./addons/readme.txt -m "deleted before add addons in e2e test run"
          # NOTE(review): the userinfo in this URL was redacted by the page scrape
          # ("[email protected]") — confirm the original credential variable.
          - git clone https://$BB_USER@bitbucket.org/geowerkstatt-hamburg/addons.git
          - cd addons
          # install dependencies of every addon that ships its own package.json
          - find . -maxdepth 2 -name package.json -execdir npm i \;
          - cd ..
          - npm install
          - npm ls
          - npm run buildPortal
          # -y keeps apt-get non-interactive so the pipeline cannot stall on a prompt
          - apt-get update && apt-get install -y unzip
          # make script executable
          - chmod +x ./deployScripts/transfer_ftp.sh
          # transfer the built portals via git-ftp
          - /bin/bash ./deployScripts/transfer_ftp.sh
pipelines:
  # Start pipeline with tag to build example zips and copy them to the Bitbucket
  # download folder, publish the docs, and mirror the release to openCode.
  tags:
    'v*.*.*':
      - step:
          name: build examples zip
          caches:
            - node
          artifacts:
            - dist/**
          script:
            - npm install
            - npm run buildExamples
            - apt-get update
      - step:
          name: deploy examples zip
          caches:
            - node
          script:
            # every step runs in a fresh container, so the package lists must be
            # refreshed in the SAME step before curl can be installed — an
            # apt-get update from a previous step does not carry over
            - apt-get update && apt-get install -y curl
            # find all zip files in the dist folder, loop through the results and post the zips to the BB download folder
            - for zipFilePath in $(find dist -name \*.zip); do
              curl -X POST --user "$bbapi_name:$bbapi_key" "https://api.bitbucket.org/2.0/repositories/geowerkstatt-hamburg/masterportal/downloads" --form files=@"$zipFilePath" --verbose;
              done
      - step:
          name: publish docs
          caches:
            - node
          artifacts:
            - docHtml/**
          script:
            - npm install
            - npm run buildMdDoc
            - apt-get update
      - step:
          name: deploy files
          caches:
            - node
          script:
            # wget is installed explicitly because it is used below
            - apt-get update && apt-get install -y curl wget wput jq
            # Get the tag list sorted by date, take a recent tag name and remove quotation marks.
            # NOTE(review): the original comment said "penultimate", but .values[2]
            # is the third-newest tag (.values[1] would be penultimate) — confirm
            # which index is intended.
            - lastTag=$(curl --request GET --user "$bbapi_name:$bbapi_key" --url "https://api.bitbucket.org/2.0/repositories/geowerkstatt-hamburg/masterportal/refs/tags?sort=-target.date" --header "Accept:application/json" | jq .values[2].name | tr -d '"')
            - echo $lastTag
            # create a folder to store the current "latest" files from the server
            - mkdir $lastTag && cd $lastTag
            - wget --user $DOC_USER --password $DOC_PASSWORD --no-parent -r $DOC_URLLatest
            - cd ..
            # find the lastTag and docHtml folders, push the files to the masterportal.org folder
            - find $lastTag -type f -exec curl --insecure --user "$DOC_USER:$DOC_PASSWORD" --ftp-create-dirs -T {} "$DOC_URL$lastTag/" --verbose \;
            - find docHtml -type f -exec curl --insecure --user "$DOC_USER:$DOC_PASSWORD" --ftp-create-dirs -T {} "$DOC_URLLatest" --verbose \;
      - step:
          name: deploy on openCode
          script:
            # NOTE(review): the host part of these URLs was redacted by the page
            # scrape ("[email protected]") — gitlab.opencode.de is reconstructed from
            # the repository slug; confirm the password/token variable name.
            - git push https://$OpenCode_User:$OpenCode_Token@gitlab.opencode.de/geowerkstatt-hamburg/masterportal.git $BITBUCKET_TAG
            - git push https://$OpenCode_User:$OpenCode_Token@gitlab.opencode.de/geowerkstatt-hamburg/masterportal.git dev -f
custom: # Pipelines that are triggered manually
End2End-Tests on saucelabs:
- step: *install-e2e-tests-server-on-test-geoportal
- step:
name: run e2e tests on saucelabs url=test.geoportal
caches:
- node
script:
# run e2e tests
- export url=https://test.geoportal-hamburg.de
- npm run browsertestOnExternalServer --testservice=saucelabs
End2End-Tests on saucelabs/ use existing server:
- step:
name: run e2e tests on saucelabs url=test.geoportal_buildname
caches:
- node
script:
- export url=https://test.geoportal-hamburg.de
- npm run browsertestOnExternalServer --testservice=saucelabs
npm audit:
- step:
name: npm audit (security)
caches:
- node
script:
- npm install
- npm audit --json