Skip to content

Commit

Permalink
Extend API docs test to cover more pages (#4996)
Browse files Browse the repository at this point in the history
  • Loading branch information
sean1588 authored Jul 24, 2024
1 parent bf2e4d4 commit 720fe2d
Show file tree
Hide file tree
Showing 7 changed files with 639 additions and 93 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -11,3 +11,4 @@ public
cypress/videos
cypress/screenshots
tools/resourcedocsgen/resourcedocsgen
ctrf/
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -64,4 +64,4 @@ check_links:
.PHONY: run-browser-tests
run-browser-tests:
$(MAKE) ensure
node ./scripts/run-browser-tests.js https://www.pulumi.com api-docs.cy.js
./scripts/run-api-docs-tests.sh
13 changes: 10 additions & 3 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,22 +13,29 @@
"@slack/web-api": "^5.12.0",
"aws-sdk": "^2.1567.0",
"broken-link-checker": "^0.7.8",
"cheerio": "^1.0.0-rc.12",
"concurrently": "^6.0.0",
"cssnano": "^5.0.8",
"cypress": "^13.6.4",
"glob": "^7.2.0",
"htmlparser2": "^9.1.0",
"http-server": "^0.12.1",
"js-yaml": "^4.1.0",
"jsdom": "^22.0.0",
"markdownlint": "^0.28.0",
"mocha": "^10.6.0",
"postcss": "^8.3.7",
"postcss-cli": "^8.3.1",
"sitemapper": "^3.2.2"
"sitemapper": "^3.2.2",
"util": "^0.12.5"
},
"scripts": {
"minify-css": "node scripts/minify-css.js"
"minify-css": "node scripts/minify-css.js",
"test-api-docs": "mocha scripts/tests --reporter mocha-ctrf-json-reporter"
},
"devDependencies": {
"cypress-multi-reporters": "^1.6.4"
"chai": "4.2.0",
"cypress-multi-reporters": "^1.6.4",
"mocha-ctrf-json-reporter": "^0.0.4"
}
}
8 changes: 8 additions & 0 deletions scripts/run-api-docs-tests.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
#!/bin/bash
# Get current website bucket.
bucket="$(curl -s https://www.pulumi.com/registry/metadata.json | jq -r '.bucket' || echo '')"

echo "Downloading current site to public dir...."
aws s3 cp "s3://${bucket}/registry" public/registry --recursive --quiet

node ./scripts/run-browser-tests.js
166 changes: 96 additions & 70 deletions scripts/run-browser-tests.js
Original file line number Diff line number Diff line change
@@ -1,76 +1,95 @@
const cp = require('child_process');
const fs = require('fs');
const AWS = require('aws-sdk');

// Extract command-line arguments
const args = process.argv.slice(2); // Remove first two elements (node and script path)

// Assuming you're passing two arguments: arg1 and arg2
const baseUrl = args[0];
const spec = args[1];

if (!(args[0] && args[1])) {
console.error("baseURL and spec file are required as arguments")
process.exit(1)
}

cp.exec(`CYPRESS_BASE_URL="${baseUrl}" yarn run cypress run --headless --reporter cypress-multi-reporters --spec "./cypress/e2e/${spec}"`, processResults)

async function processResults(error, stdout, stderr) {
// log full run output to runner console.
console.log(stdout);

// EXTREME HACK: Extracts JSON out of console output.
const startIndex = stdout.indexOf("{");
const endIndex = stdout.lastIndexOf("}");
const jsonString = stdout.substring(startIndex, endIndex + 1);
const results = JSON.parse(jsonString);
const transformed = transformResults(results)
// Log JSON structure to the console.
console.log(transformed);
await pushS3(transformed);
const util = require("util");
const exec = util.promisify(require("child_process").exec);
const fs = require("fs");
const AWS = require("aws-sdk");

const pkgs = ["aws", "azure", "gcp"];

const results = {
tests: 0,
passes: 0,
failures: 0,
start: 0,
end: 0,
duration: 0,
failedPages: [],
failedPageCount: 0,
};

const testRuns = pkgs.map((pkg) => {
return exec(`npm run test-api-docs -- --pkg=${pkg} || true`).then(
(stdout, stderr) => {
console.log(stdout);
processJSON(stdout, stderr);
},
);
});

Promise.all(testRuns).then(async () => {
console.log(results);
await pushResultsS3(results);
});

function processJSON(stdout, stderr) {
const contents = fs.readFileSync("ctrf/ctrf-report.json", {
encoding: "utf8",
});
const results = JSON.parse(contents);
transformResults(results);

// The cli command error is trapped here since it is a sub process of this script.
// Checking if error here will enable us to mark this as a failed run by exiting
// unsuccessfully. Otherwise the run will always pass successfully regardless of
// the result of the cypress tests.
if (error) {
// the result of the tests.
if (stderr) {
console.error("errors:", stderr);
process.exit(1)
process.exit(1);
}
}

function transformResults(results) {
const result = results.stats;
result["ghRunURL"] = getRunUrl();
function transformResults(res) {
const summary = res.results.summary;
results.tests += summary.tests;
results.passes += summary.passed;
results.failures += summary.failed;
results.start += summary.start;
results.end += summary.stop;
results.duration = summary.stop - summary.start + results.duration;
results.ghRunURL = getRunUrl();

// Get list of failed pages.
const failedPages = [];
// iterate over test failures and add page url to failedPages array.
results.failures.forEach(f => {
failedPages.push(extractPageUrl(f.fullTitle))
})
res.results.tests
.filter((t) => t.status !== "passed")
.forEach((f) => {
failedPages.push(extractPageUrl(f.name));
});

// dedupe pages and keep a count of failure occurrences for each page, mapping
// each page to the number of times it failed.
const pageMap = {}
failedPages.forEach(page => {
const pageMap = {};
failedPages.forEach((page) => {
pageMap[page] = (pageMap[page] || 0) + 1;
})

// Convert pageMap to an array of objects with per-page failure counts.
result["failedPages"] = Object.keys(pageMap).map(key => ({
page: key,
failures: pageMap[key],
tests: result.tests/50,
}));

result["failedPageCount"] = Object.keys(pageMap).length;
return result;
});

// Convert pageMap to an array of objects with per-page failure counts.
results.failedPages = [
...results.failedPages,
...Object.keys(pageMap).map((key) => ({
page: key,
failures: pageMap[key],
tests: 15,
})),
];

results.failedPageCount =
Object.keys(pageMap).length + results.failedPageCount;
return results;
}

function extractPageUrl(msg) {
const urlRegex = /(https?:\/\/[^)]+)/;
const urlRegex = /^(https?:\/\/[^)]+\/)/;
const match = msg.match(urlRegex);
const url = match ? match[0] : null;

Expand All @@ -83,43 +102,50 @@ function getRunUrl() {
const runId = process.env.GITHUB_RUN_ID;
const runAttempt = process.env.GITHUB_RUN_ATTEMPT;

return serverUrl + "/" + repo + "/actions/runs/" + runId + "/attempts/" + runAttempt;
return (
serverUrl +
"/" +
repo +
"/actions/runs/" +
runId +
"/attempts/" +
runAttempt
);
}

// expand date string to return yyyy, mm, dd
function expandDate(dateString) {
const date = new Date(dateString);
const year = date.getFullYear();
const monthNumber = date.getMonth() + 1 // zero indexed, so add 1 to get month number
const month = `${ monthNumber < 10 ? `0${monthNumber}` : monthNumber }`;
const monthNumber = date.getMonth() + 1; // zero indexed, so add 1 to get month number
const month = `${monthNumber < 10 ? `0${monthNumber}` : monthNumber}`;
const dayNumber = date.getDate();
const day = `${ dayNumber < 10 ? `0${dayNumber}` : dayNumber }`;
return {year, month, day};
const day = `${dayNumber < 10 ? `0${dayNumber}` : dayNumber}`;
return { year, month, day };
}

// upload to S3 with yyyy/mm/dd prefix.
async function pushS3(obj) {
const bucketName = 'pulumi-api-docs-e2e-test-results-prodution';
async function pushResultsS3(obj) {
const bucketName = "pulumi-api-docs-e2e-test-results-prodution";

const jsonData = JSON.stringify(obj);
// extract yyyy, mm, dd from date.
const {year, month, day} = expandDate(obj.start);
const { year, month, day } = expandDate(obj.start);

// Write JSON to file
const filename = 'results.json';
const filename = "results.json";
fs.writeFileSync(filename, jsonData);

// Upload JSON file to S3 bucket
console.log("pushing to S3")
console.log("pushing to S3");
const s3 = new AWS.S3();
const key = `${year}/${month}/${day}/results.json`;
const key = `${year}/${month}/${day}/results.json`;

const uploadParams = {
Bucket: bucketName,
Key: key,
Body: fs.createReadStream(filename)
Body: fs.createReadStream(filename),
};

return s3.upload(uploadParams).promise();
}

Loading

0 comments on commit 720fe2d

Please sign in to comment.