Clean up code a bit - don't litter primary hooks quite so severely.
josh-heyer committed Sep 11, 2024
1 parent d9d342d commit bcafc53
Showing 1 changed file with 98 additions and 70 deletions.
168 changes: 98 additions & 70 deletions gatsby-node.js
@@ -7,6 +7,7 @@ const { createFilePath } = require(`gatsby-source-filesystem`);
const { exec, execSync } = require("child_process");
const util = require("node:util");
const execAsync = util.promisify(exec);
const asyncFs = require("node:fs/promises");

const {
replacePathVersion,
@@ -111,6 +112,8 @@ exports.onCreateNode = async ({
});
}

// these files act as a template of sorts, used to generate a function index for a reference section
// see tools/automation/generators/refbuilder/refbuilder.js for details
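// (the generator (re)writes index.json and index.mdx for that directory; see processReferenceIndexes below for how it's invoked)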
if (node.absolutePath.endsWith("index.mdx.src")) {
const indexBasePath = path.dirname(node.absolutePath);
const referenceIndexNodeDef = {
@@ -268,42 +271,7 @@ exports.createPages = async ({ actions, graphql, reporter }) => {

processFileNodes(result.data.allPublicFile.nodes, productVersions, actions);

// reference indexer - generate an index page for reference material
// triggered by the presence of index.mdx.src in a directory. When found,
// - scans pages (recursing into subdirectories)
// - identifies reference sections matching a specific pattern
// - (re)generates index.json
// - (re)generates index.mdx
// THESE GENERATED FILES MUST BE COMMITTED ALONG WITH THE CHANGED
// PAGES - we do not want them to be out of sync in the repo.
// To emphasize this, generation will only happen locally; CI builds
// (identified by the presence of GITHUB_REF in the environment) will skip this step.
// For more information, see:
// - tools/automation/generators/refbuilder/refbuilder.js
if (!process.env.GITHUB_REF) {
const activity = reporter.createProgress(
"Generating reference indexes",
result.data.allReferenceIndex.nodes.length,
);
activity.start();

for (let indexBasePath of result.data.allReferenceIndex.nodes.map(
(node) => node.indexBasePath,
)) {
const command = `node ${path.join(
referenceIndexerToolPath,
"refbuilder.js",
)} --source ${indexBasePath}`;
try {
await execAsync(command);
} catch (err) {
reporter.panicOnBuild(`Failed to generate index for reference docs in ${indexBasePath}:
${err}`);
}
activity.tick();
}
activity.done();
}
await processReferenceIndexes(result.data.allReferenceIndex.nodes, reporter);

// perform depth first preorder traversal
const treeRoot = mdxNodesToTree(nodes);
@@ -462,6 +430,54 @@ const createAdvocacy = (navTree, prevNext, doc, productVersions, actions) => {
});
};

/**
*
* @param {ReferenceIndex[]} referenceIndexNodes Nodes to process
* @param {*} reporter Gatsby reporter
* @summary reference indexer - generates an index page for reference material, triggered by a node corresponding to an index.mdx.src file
* @description ReferenceIndex nodes are created when an index.mdx.src is found in a directory. This routine
* processes them by kicking off a subprocess, which...
* - scans pages (recursing into subdirectories)
* - identifies reference sections matching a specific pattern
* - (re)generates index.json
* - (re)generates index.mdx
*
* THESE GENERATED FILES MUST BE COMMITTED ALONG WITH THE CHANGED
* PAGES - we do not want them to be out of sync in the repo.
* To emphasize this, generation will only happen locally; CI builds
* (identified by the presence of GITHUB_REF in the environment) will skip this step.
*
* For more information, see:
* - tools/automation/generators/refbuilder/refbuilder.js
* - {@link onCreateNode}
*/
const processReferenceIndexes = async (referenceIndexNodes, reporter) => {
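// GITHUB_REF is set on GitHub Actions runners, so its presence identifies a CI build; skip generation there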
if (process.env.GITHUB_REF) return;

const activity = reporter.createProgress(
"Generating reference indexes",
referenceIndexNodes.length,
);
activity.start();

for (let indexBasePath of referenceIndexNodes.map(
(node) => node.indexBasePath,
)) {
const command = `node ${path.join(
referenceIndexerToolPath,
"refbuilder.js",
)} --source ${indexBasePath}`;
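// the resulting command looks something like: node tools/automation/generators/refbuilder/refbuilder.js --source <indexBasePath>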
try {
await execAsync(command);
} catch (err) {
reporter.panicOnBuild(`Failed to generate index for reference docs in ${indexBasePath}:
${err}`);
}
activity.tick();
}
activity.done();
};

/**
*
* @param {PublicFile[]} fileNodes Nodes to process
@@ -614,40 +630,7 @@ exports.onPostBuild = async ({ graphql, reporter, pathPrefix }) => {
//
// get rid of compilation hash - speeds up netlify deploys
//
const hashTimer = reporter.createProgress("Removing compilation hashes");
hashTimer.start();

const { globby } = await import("globby");
const generatedHTML = await globby([
path.join(__dirname, "/public/**/*.html"),
]);
hashTimer.total = generatedHTML.length;
for (
let i = 0, filename;
i < generatedHTML.length, (filename = generatedHTML[i]);
++i
) {
let file = await readFile(filename);
file = file.replace(
/window\.___webpackCompilationHash="[^"]+"/,
'window.___webpackCompilationHash=""',
);
await writeFile(filename, file);
hashTimer.tick();
}
const appDataFilename = path.join(
__dirname,
"/public/page-data/app-data.json",
);
const appData = await readFile(appDataFilename);
await writeFile(
appDataFilename,
appData.replace(
/"webpackCompilationHash":"[^"]+"/,
'"webpackCompilationHash":""',
),
);
hashTimer.done();
await removeCompilationHashes(reporter);

//
// additional headers
@@ -782,3 +765,48 @@ exports.onPostBuild = async ({ graphql, reporter, pathPrefix }) => {
${pathPrefix}/* /:splat 200`,
);
};

/**
* Strip compilation hashes from generated HTML.
* This speeds up Netlify deploys, as (otherwise unchanged) files no longer differ on every build.
* There probably should be a faster / more elegant way to do this, possibly by overriding one of the
* default webpack configs... but I've had no luck doing so up to now.
* @param {*} reporter Gatsby reporter
*/
async function removeCompilationHashes(reporter) {
const hashTimer = reporter.createProgress("Removing compilation hashes");
hashTimer.start();

const { globby } = await import("globby");
const generatedHTML = await globby([
path.join(__dirname, "/public/**/*.html"),
]);
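// Gatsby inlines the webpack compilation hash into each generated page; blanking it keeps otherwise-unchanged pages byte-identical across builds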
const hashPattern = /window\.___webpackCompilationHash="[^"]+"/;
const hashReplace = 'window.___webpackCompilationHash=""';
hashTimer.total = generatedHTML.length;
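// rewrite files in fixed-size batches rather than all at once - presumably to bound the number of files open concurrently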
const chunkSize = 100;
for (let i = 0; i < generatedHTML.length; i += chunkSize) {
const hashPromises = generatedHTML
.slice(i, i + chunkSize)
.map(async (filename) => {
let file = await readFile(filename);
file = file.replace(hashPattern, hashReplace);
await writeFile(filename, file);
hashTimer.tick();
});
await Promise.all(hashPromises);
}
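// page-data/app-data.json records the compilation hash too; blank it there as well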
const appDataFilename = path.join(
__dirname,
"/public/page-data/app-data.json",
);
const appData = await readFile(appDataFilename);
await writeFile(
appDataFilename,
appData.replace(
/"webpackCompilationHash":"[^"]+"/,
'"webpackCompilationHash":""',
),
);
hashTimer.done();
}
