forked from github/docs
-
Notifications
You must be signed in to change notification settings - Fork 0
/
check-s3-images.js
executable file
·157 lines (120 loc) · 5.21 KB
/
check-s3-images.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
#!/usr/bin/env node
const path = require('path')
const { chain, difference } = require('lodash')
const { loadPages } = require('../lib/pages')
const loadSiteData = require('../lib/site-data')
const renderContent = require('../lib/render-content')
const allVersions = require('../lib/all-versions')
const nonEnterpriseDefaultVersion = require('../lib/non-enterprise-default-version')
const { getS3BucketPathFromVersion, getVersionFromS3BucketPath } = require('../lib/s3-bucket-path-utils')
const patterns = require('../lib/patterns')
const authenticateToAWS = require('../lib/authenticate-to-aws.js')
const readlineSync = require('readline-sync')
const { execSync } = require('child_process')
// all version names that start with `enterprise-server@` (the GHES releases)
const enterpriseServerVersions = Object.keys(allVersions).filter(v => v.startsWith('enterprise-server@'))
// companion script used below to upload any images found to be missing
const uploadScript = path.join(process.cwd(), 'script/upload-images-to-s3.js')
// ignore the non-enterprise default version
const versionsToCheck = Object.keys(allVersions)
.filter(version => version !== nonEnterpriseDefaultVersion)
// [start-readme]
//
// Run this script in your branch to check whether any images referenced in content are
// not in an expected S3 bucket. You will need to authenticate to S3 via `awssume` to use this script.
// Instructions for the one-time setup are at docs-content/doc-team-workflows/workflow-information-for-all-writers/setting-up-awssume-and-s3cmd.md
//
// [end-readme]
main()
// Render every S3-eligible page, collect the image paths it references,
// compare against the keys actually present in the `github-images` bucket,
// and optionally upload any missing images, then re-check.
async function main () {
  const s3 = await authenticateToAWS()

  console.log('Working...\n')

  const pages = await getEnglishPages()
  const siteData = await getEnglishSiteData()

  // every S3 image path referenced in content, across all checked versions
  const s3References = []

  for (const version of versionsToCheck) {
    for (const page of pages) {
      // skip page if it doesn't have a permalink for the current version
      if (!page.permalinks.some(permalink => permalink.pageVersion === version)) continue

      // skip index pages because they do not contain images
      if (page.relativePath.endsWith('index.md')) continue

      // build fake context object for rendering the page
      page.version = version
      const context = {
        page,
        site: siteData,
        currentVersion: version,
        currentLanguage: 'en',
        enterpriseServerVersions
      }

      const rendered = await renderContent(page.markdown, context)
      const imageReferences = rendered.match(patterns.imagePath)

      if (!imageReferences) continue

      const bucketPath = getS3BucketPathFromVersion(version)

      imageReferences.forEach(ref => {
        s3References.push(`${bucketPath}${ref}`)
      })
    }
  }

  // store all images referenced in Enterprise content
  const s3ReferencesToCheck = chain(s3References).uniq().sort().value()
  console.log(`Found ${s3ReferencesToCheck.length} images referenced in S3-eligible content in the current checkout.\n`)

  console.log('Checking the github-images S3 bucket...\n')

  const imagesOnS3 = []

  // now look for the images on S3.
  // NOTE: listObjects fills the shared `imagesOnS3` accumulator in place, so
  // its return value must NOT be pushed back into the array — doing so would
  // push the array into itself (the bug this replaces).
  for (const version of versionsToCheck) {
    const bucketPath = getS3BucketPathFromVersion(version)
    await listObjects(s3, bucketPath, imagesOnS3)
  }

  // store all found images on s3
  const allImagesOnS3 = chain(imagesOnS3).flatten().uniq().sort().value()

  // referenced in content but not present in the bucket
  const imagesMissingFromS3 = difference(s3ReferencesToCheck, allImagesOnS3)

  // return early if there are no missing images
  if (!imagesMissingFromS3.length) {
    console.log('All images are in S3 that should be!')
    return
  }

  console.log(`${imagesMissingFromS3.length} images are missing from S3:\n\n${imagesMissingFromS3.join('\n')}`)

  const prompt = `\nDo you want to try to upload these images to S3 from your local checkout?
\nPress Y to continue, or press any other key to cancel: `

  const answer = readlineSync.question(prompt)

  if (!answer.match(/^Y$/mi)) {
    console.log('Exiting!')
    process.exit()
  }

  console.log('Trying to upload...\n')

  imagesMissingFromS3.forEach(missingImage => {
    // given an s3 path like `enterprise/2.19/assets/images/foo.png`,
    // find the version (e.g. `enterprise-server@2.19`) and the local path
    // `assets/images/foo.png`, then attempt to upload the file using the
    // upload script
    const version = getVersionFromS3BucketPath(missingImage)
    const assetPath = missingImage.replace(/.+?assets/, 'assets')
    const result = execSync(`${uploadScript} --single ${assetPath} --version ${version}`)
    console.log(result.toString())
  })

  console.log('Done uploading! Checking S3 again.')
  // await the recursive re-check so its failures propagate to our caller
  // instead of becoming a floating promise
  await main()
}
// Load every page from the site and return only the English-language ones.
async function getEnglishPages () {
  const allPages = await loadPages()
  const englishPages = []
  for (const page of allPages) {
    if (page.languageCode === 'en') englishPages.push(page)
  }
  return englishPages
}
// Load the full site data object and return the English `site` subtree.
async function getEnglishSiteData () {
  const { en } = await loadSiteData()
  return en.site
}
// Recursively list keys in the `github-images` bucket that start with
// `bucketPath`, accumulating matches into the shared `imagesOnS3` array
// (one array of keys pushed per page of S3 results).
//
// NOTE(review): this function BOTH mutates `imagesOnS3` in place AND returns
// it — except on the no-match branch, where it returns a fresh `[]`. Callers
// should rely on the in-place mutation, not the return value; pushing the
// return value back into `imagesOnS3` would push the array into itself.
//
// `token` is the S3 ContinuationToken used while paginating; it is omitted
// on the first call.
async function listObjects (s3, bucketPath, imagesOnS3, token) {
  const params = {
    Bucket: 'github-images',
    // StartAfter positions the listing just before the bucket path; keys that
    // sort after it but don't actually match are filtered out below
    StartAfter: bucketPath
  }
  // resume from the previous page when paginating
  if (token) params.ContinuationToken = token
  const data = await s3.listObjectsV2(params).promise()
  const matchingKeys = data.Contents
    .map(obj => obj.Key)
    // keep only keys under the bucket path we're checking
    .filter(imageFile => imageFile.startsWith(bucketPath))
  // keys come back in sorted order, so a page with no matches means we've
  // moved past the bucket path — stop paginating
  if (!matchingKeys.length) return []
  imagesOnS3.push(matchingKeys)
  // fetch the next page when S3 reports the listing was truncated
  if (data.IsTruncated) {
    await listObjects(s3, bucketPath, imagesOnS3, data.NextContinuationToken)
  }
  return imagesOnS3
}