# performance-tests.yml
name: Performance testing

# Run when a PR comment is created (issues and PRs are considered the same entity in the GitHub API)
on:
  issue_comment:
    types: [created]

# Add some extra perms to comment on a PR
permissions:
  pull-requests: write
  contents: read

jobs:
  run-perftests:
    # Make sure 1. this is a PR, not an issue, and 2. the comment contains "/run performance test" anywhere in its body
    if: github.event.issue.pull_request && contains(github.event.comment.body, '/run performance test')
    # Run this on Delphi's self-hosted runner
    runs-on: self-hosted
    outputs:
      request_count: ${{ steps.output.outputs.request_count }}
      failure_count: ${{ steps.output.outputs.failure_count }}
      med_time: ${{ steps.output.outputs.med_time }}
      avg_time: ${{ steps.output.outputs.avg_time }}
      min_time: ${{ steps.output.outputs.min_time }}
      max_time: ${{ steps.output.outputs.max_time }}
      requests_per_sec: ${{ steps.output.outputs.requests_per_sec }}
    steps:
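      # Bring up a WireGuard tunnel using the WG_PERF_* repository secrets (endpoint,
      # allowed IPs, and keys); presumably this is what lets the runner reach the
      # performance-test database and hosts used below.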
      - name: Set up WireGuard
        uses: egor-tensin/[email protected]
        with:
          endpoint: '${{ secrets.WG_PERF_ENDPOINT }}'
          endpoint_public_key: '${{ secrets.WG_PERF_ENDPOINT_PUBLIC_KEY }}'
          ips: '${{ secrets.WG_PERF_IPS }}'
          allowed_ips: '${{ secrets.WG_PERF_ALLOWED_IPS }}'
          private_key: '${{ secrets.WG_PERF_PRIVATE_KEY }}'
      - name: Clean files from previous runs
        uses: AutoModality/action-clean@v1
      - name: Check out repository
        uses: actions/checkout@v3
      # Previous step checks out the default branch, so we switch to the pull request's branch
      - name: Switch to PR branch
        run: |
          hub pr checkout ${{ github.event.issue.number }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Set up repository # mimics install.sh in the README, except that delphi-epidata is copied from the PR checkout rather than cloned from main
        run: |
          cd ..
          rm -rf driver
          mkdir -p driver/repos/delphi
          cd driver/repos/delphi
          git clone https://github.com/cmu-delphi/operations
          git clone https://github.com/cmu-delphi/utils
          git clone https://github.com/cmu-delphi/flu-contest
          git clone https://github.com/cmu-delphi/nowcast
          cd ../../
          cd ..
          cp -R delphi-epidata driver/repos/delphi/delphi-epidata
          cd -
          ln -s repos/delphi/delphi-epidata/dev/local/Makefile
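      # Build and start the epidata web server against the perf-test database
      # (secrets.DB_CONN_STRING) with rate limiting effectively disabled, plus Redis,
      # via the dev/local Makefile symlinked above.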
      - name: Build & run epidata
        run: |
          cd ../driver
          sudo make web sql="${{ secrets.DB_CONN_STRING }}" rate_limit="999999/second"
          sudo make redis
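      # The Locust load-testing setup lives in cmu-delphi/delphi-admin, so check it out
      # into ./delphi-admin using the deploy-machine PAT.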
      - name: Check out delphi-admin
        uses: actions/checkout@v3
        with:
          repository: cmu-delphi/delphi-admin
          token: ${{ secrets.CMU_DELPHI_DEPLOY_MACHINE_PAT }}
          path: delphi-admin
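      # Build the Locust image, pre-create world-writable output CSVs for the container
      # to write into, then run Locust headless against the local epidata instance;
      # -i is set to the number of lines in ${CSV} (presumably one iteration per listed request).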
      - name: Build & run Locust
        continue-on-error: true # sometimes ~2-5 queries fail; we shouldn't end the run in that case
        env:
          PERFTEST_API_KEY: ${{ secrets.PERFTEST_API_KEY }}
        run: |
          cd delphi-admin/load-testing/locust
          docker build -t locust .
          export CSV=v4-requests-small.csv
          touch output_stats.csv && chmod 666 output_stats.csv
          touch output_stats_history.csv && chmod 666 output_stats_history.csv
          touch output_failures.csv && chmod 666 output_failures.csv
          touch output_exceptions.csv && chmod 666 output_exceptions.csv
          docker run --net=host -v $PWD:/mnt/locust -e CSV="/mnt/locust/${CSV}" locust -f /mnt/locust/v4.py --host http://127.0.0.1:10080/ --users 10 --spawn-rate 1 --headless -i "$(cat ${CSV} | wc -l)" --csv=/mnt/locust/output
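      # Parse the final (aggregated) row of Locust's output_stats.csv and expose selected
      # columns as step outputs, which the comment jobs below read via needs.run-perftests.outputs.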
      - name: Produce output for summary
        id: output
        uses: jannekem/run-python-script-action@v1
        with:
          script: |
            import os

            def write_string(name, value):
                with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
                    print(f'{name}={value}', file=fh)

            def write_float(name, value):
                write_string(name, "{:.2f}".format(float(value)))

            with open("delphi-admin/load-testing/locust/output_stats.csv", "r", encoding="utf-8", errors="ignore") as scraped:
                final_line = scraped.readlines()[-1].split(",")
                write_string('request_count', final_line[2])
                write_string('failure_count', final_line[3])
                write_float('med_time', final_line[4])
                write_float('avg_time', final_line[5])
                write_float('min_time', final_line[6])
                write_float('max_time', final_line[7])
                write_float('requests_per_sec', final_line[9])
      - name: Archive results as artifacts
        uses: actions/upload-artifact@v4
        with:
          name: locust-output
          path: |
            delphi-admin/load-testing/locust/output_*.csv
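
  # Report results back on the PR: comment-success posts the summary built from
  # run-perftests' outputs, and comment-failure (below) posts just a link to the run.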
  comment-success:
    runs-on: ubuntu-latest
    if: success()
    needs: run-perftests
    steps:
      - name: Comment run results
        env:
          GITHUB_WORKFLOW_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
        uses: actions/github-script@v5
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: `✅ Performance tests complete! Result summary:
              - Total requests: **${{ needs.run-perftests.outputs.request_count }}**
              - Total failures: **${{ needs.run-perftests.outputs.failure_count }}**
              - Min response time: **${{ needs.run-perftests.outputs.min_time }} ms**
              - Max response time: **${{ needs.run-perftests.outputs.max_time }} ms**
              - Average response time: **${{ needs.run-perftests.outputs.avg_time }} ms**
              - Median response time: **${{ needs.run-perftests.outputs.med_time }} ms**
              - Requests per second: **${{ needs.run-perftests.outputs.requests_per_sec }}**
              Click here to view full results: ${{ env.GITHUB_WORKFLOW_URL }}.`
            })

  comment-failure:
    runs-on: ubuntu-latest
    if: failure()
    needs: run-perftests
    steps:
      - name: Comment run results
        env:
          GITHUB_WORKFLOW_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
        uses: actions/github-script@v5
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: `❌ Performance tests failed! Click here to view full results: ${{ env.GITHUB_WORKFLOW_URL }}.`
            })