From ce5842b19953b7fb6b45cb5365d8fa90a24719f0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Fran=C3=A7ois=20Daoust?=
Date: Wed, 24 Jul 2024 11:19:14 +0200
Subject: [PATCH] Switch to ECMAScript modules (#1298)

This should make it possible to bump dependencies that have switched to
ESM and can no longer be imported as CommonJS modules.
---
 package.json                          |  1 +
 test/css/all.js                       | 14 ++++++++------
 test/css/package.js                   |  6 +++---
 test/elements/all.js                  | 10 ++++++----
 test/elements/consistency.js          | 14 ++++++++------
 test/events/all.js                    | 15 +++++++++------
 test/idl/all.js                       | 10 ++++++----
 test/idl/consistency.js               | 15 +++++++++------
 test/idl/package.js                   |  6 +++---
 test/idl/validate.js                  | 16 +++++++++-------
 test/schemas.js                       | 23 +++++++++++++----------
 tools/amend-event-data.js             | 14 ++++++++------
 tools/apply-patches.js                | 16 +++++++++-------
 tools/bump-packages-minor.js          | 14 ++++++++------
 tools/clean-abandoned-files.js        |  6 +++---
 tools/clean-dropped-specs-files.js    |  6 +++---
 tools/clean-patches.js                | 13 +++++++------
 tools/commit-curated.js               | 12 ++++++------
 tools/create-patch.js                 | 11 ++++++-----
 tools/css-json-to-ttl.js              |  2 +-
 tools/drop-css-property-duplicates.js | 19 +++++++++++--------
 tools/octokit.js                      |  7 ++++---
 tools/prepare-curated.js              | 19 ++++++++++---------
 tools/prepare-packages.js             | 11 ++++++-----
 tools/prepare-release.js              | 12 ++++++------
 tools/release-package.js              | 14 +++++++-------
 tools/request-pr-review.js            |  2 +-
 tools/utils.js                        |  6 +++---
 28 files changed, 174 insertions(+), 140 deletions(-)

diff --git a/package.json b/package.json
index 6591c72ef971..583b88a6b200 100644
--- a/package.json
+++ b/package.json
@@ -23,6 +23,7 @@
   "engines": {
     "node": ">=20"
   },
+  "type": "module",
   "devDependencies": {
     "@actions/core": "1.10.1",
     "@jsdevtools/npm-publish": "3.1.1",
diff --git a/test/css/all.js b/test/css/all.js
index 2183cc7a3e0c..c7671adc0753 100644
--- a/test/css/all.js
+++ b/test/css/all.js
@@ -7,13 +7,15 @@
  * data because that view is a strict subset of the curated view.
  */
 
-const assert = require('assert').strict;
-const path = require('path');
-const css = require('@webref/css');
-const index = require('../../curated/index.json');
-const { definitionSyntax } = require('css-tree');
+import { strict as assert } from 'node:assert';
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import css from '@webref/css';
+import index from '../../curated/index.json' with { type: 'json' };
+import { definitionSyntax } from 'css-tree';
 
-const curatedFolder = path.join(__dirname, '..', '..', 'curated', 'css');
+const scriptPath = path.dirname(fileURLToPath(import.meta.url));
+const curatedFolder = path.join(scriptPath, '..', '..', 'curated', 'css');
 
 // Expected content in CSS extracts
 const cssValues = [
diff --git a/test/css/package.js b/test/css/package.js
index 951343189c2c..ff286bb012d2 100644
--- a/test/css/package.js
+++ b/test/css/package.js
@@ -1,7 +1,7 @@
-const assert = require('assert').strict;
+import { strict as assert } from 'node:assert';
 
-const cssPackage = require('../../packages/css/package.json');
-const rootPackage = require('../../package.json');
+import cssPackage from '../../packages/css/package.json' with { type: 'json' };
+import rootPackage from '../../package.json' with { type: 'json' };
 
 describe('The @webref/css package', () => {
   it('uses the same version of css-tree as main package', () => {
diff --git a/test/elements/all.js b/test/elements/all.js
index 47ba3fb5f172..7b0b4482abba 100644
--- a/test/elements/all.js
+++ b/test/elements/all.js
@@ -7,11 +7,13 @@
  * the data because that view is a strict subset of the curated view.
  */
 
-const assert = require('assert').strict;
-const path = require('path');
-const elements = require('@webref/elements');
+import { strict as assert } from 'node:assert';
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import elements from '@webref/elements';
 
-const curatedFolder = path.join(__dirname, '..', '..', 'curated', 'elements');
+const scriptPath = path.dirname(fileURLToPath(import.meta.url));
+const curatedFolder = path.join(scriptPath, '..', '..', 'curated', 'elements');
 
 describe('The curated view of elements extracts', function () {
   it('contains valid JSON and expected properties', async function () {
diff --git a/test/elements/consistency.js b/test/elements/consistency.js
index ad91e6862ddb..ed66fea43a33 100644
--- a/test/elements/consistency.js
+++ b/test/elements/consistency.js
@@ -6,19 +6,21 @@
  * view because of some missing IDL definition in that view.
  */
 
-const assert = require('assert').strict;
-const path = require('path');
-const elements = require('@webref/elements');
-const idl = require('@webref/idl');
+import { strict as assert } from 'node:assert';
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import elements from '@webref/elements';
+import idl from '@webref/idl';
 
+const scriptPath = path.dirname(fileURLToPath(import.meta.url));
 const views = [
   {
     name: 'curated',
-    folder: path.join(__dirname, '..', '..', 'curated')
+    folder: path.join(scriptPath, '..', '..', 'curated')
   },
   {
     name: '@webref/elements package',
-    folder: path.join(__dirname, '..', '..', 'packages')
+    folder: path.join(scriptPath, '..', '..', 'packages')
   }
 ];
 
diff --git a/test/events/all.js b/test/events/all.js
index c2f150254aac..5f97b914334b 100644
--- a/test/events/all.js
+++ b/test/events/all.js
@@ -7,13 +7,16 @@
  * the data because that view is a strict subset of the curated view.
  */
 
-const assert = require('assert').strict;
-const path = require('path');
-const events = require('@webref/events');
-const idl = require('@webref/idl');
-const { getInterfaceTreeInfo } = require('reffy');
+import { strict as assert } from 'node:assert';
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import events from '@webref/events';
+import idl from '@webref/idl';
+import reffy from 'reffy';
+const getInterfaceTreeInfo = reffy.getInterfaceTreeInfo;
 
-const curatedFolder = path.join(__dirname, '..', '..', 'curated');
+const scriptPath = path.dirname(fileURLToPath(import.meta.url));
+const curatedFolder = path.join(scriptPath, '..', '..', 'curated');
 
 let allEvents = null;
 const interfaces = new Set();
diff --git a/test/idl/all.js b/test/idl/all.js
index cc0285af19fa..c6f23947b381 100644
--- a/test/idl/all.js
+++ b/test/idl/all.js
@@ -7,11 +7,13 @@
  * data because that view is a strict subset of the curated view.
  */
 
-const assert = require('assert').strict;
-const path = require('path');
-const idl = require('@webref/idl');
+import { strict as assert } from 'node:assert';
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import idl from '@webref/idl';
 
-const curatedFolder = path.join(__dirname, '..', '..', 'curated', 'idl');
+const scriptPath = path.dirname(fileURLToPath(import.meta.url));
+const curatedFolder = path.join(scriptPath, '..', '..', 'curated', 'idl');
 
 describe('The curated view of Web IDL extracts', function () {
   this.slow(5000);
diff --git a/test/idl/consistency.js b/test/idl/consistency.js
index 38e7295b5d4f..7e7ad7a1f1a8 100644
--- a/test/idl/consistency.js
+++ b/test/idl/consistency.js
@@ -10,19 +10,22 @@
  * view because of some missing IDL definition in that view.
  */
 
-const assert = require('assert').strict;
-const path = require('path');
-const idl = require('@webref/idl');
-const { studyWebIdl } = require('strudy');
+import { strict as assert } from 'node:assert';
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import idl from '@webref/idl';
+import strudy from 'strudy';
+const studyWebIdl = strudy.studyWebIdl;
 
+const scriptPath = path.dirname(fileURLToPath(import.meta.url));
 const views = [
   {
     name: 'curated',
-    folder: path.join(__dirname, '..', '..', 'curated', 'idl')
+    folder: path.join(scriptPath, '..', '..', 'curated', 'idl')
   },
   {
     name: '@webref/idl package',
-    folder: path.join(__dirname, '..', '..', 'packages', 'idl')
+    folder: path.join(scriptPath, '..', '..', 'packages', 'idl')
   }
 ];
 
diff --git a/test/idl/package.js b/test/idl/package.js
index d1299bf1072e..40794682f9a6 100644
--- a/test/idl/package.js
+++ b/test/idl/package.js
@@ -1,7 +1,7 @@
-const assert = require('assert').strict;
+import { strict as assert } from 'node:assert';
 
-const idlPackage = require('../../packages/idl/package.json');
-const rootPackage = require('../../package.json');
+import idlPackage from '../../packages/idl/package.json' with { type: 'json' };
+import rootPackage from '../../package.json' with { type: 'json' };
 
 describe('The @webref/idl package', () => {
   it('uses the same version of webidl2.js as main package', () => {
diff --git a/test/idl/validate.js b/test/idl/validate.js
index f1fb7795d5e7..d5dd48f51e23 100644
--- a/test/idl/validate.js
+++ b/test/idl/validate.js
@@ -9,24 +9,26 @@
 * the package view, e.g. due to missing base interfaces.
  */
 
-const assert = require('assert').strict;
-const path = require('path');
-const WebIDL2 = require('webidl2');
-const idl = require('@webref/idl');
+import { strict as assert } from 'node:assert';
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import { validate as validateWebIdl } from 'webidl2';
+import idl from '@webref/idl';
 
+const scriptPath = path.dirname(fileURLToPath(import.meta.url));
 const curatedView = {
   name: 'curated',
-  folder: path.join(__dirname, '..', '..', 'curated', 'idl')
+  folder: path.join(scriptPath, '..', '..', 'curated', 'idl')
 };
 const packageView = {
   name: '@webref/idl package',
-  folder: path.join(__dirname, '..', '..', 'packages', 'idl')
+  folder: path.join(scriptPath, '..', '..', 'packages', 'idl')
 };
 
 // Wrapper around the WebIDL2.js validation function to ignore
 // [LegacyNoInterfaceObject] "errors".
 function validate(ast) {
-  const validations = WebIDL2.validate(ast).filter(v => {
+  const validations = validateWebIdl(ast).filter(v => {
     return v.ruleName !== 'no-nointerfaceobject';
   });
   if (!validations.length) {
diff --git a/test/schemas.js b/test/schemas.js
index cb3019491286..f5d6ef4006c8 100644
--- a/test/schemas.js
+++ b/test/schemas.js
@@ -4,19 +4,22 @@
  * The tests run against the curated view of the extracts.
  */
 
-const fs = require('fs');
-const path = require('path');
-const assert = require('assert').strict;
-const { getSchemaValidationFunction } = require('reffy');
+import { strict as assert } from 'node:assert';
+import fs from 'node:fs';
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import reffy from 'reffy';
+import { loadJSON } from '../tools/utils.js';
 
-const curatedFolder = path.join(__dirname, '..', 'curated');
+const scriptPath = path.dirname(fileURLToPath(import.meta.url));
+const curatedFolder = path.join(scriptPath, '..', 'curated');
 const files = fs.readdirSync(curatedFolder);
 for (const file of files) {
-  const validate = getSchemaValidationFunction(file);
+  const validate = reffy.getSchemaValidationFunction(file);
   if (file.endsWith('.json')) {
     describe(`The ${file} file`, function () {
-      it('contains valid data', function () {
-        const data = require(path.join(curatedFolder, file));
+      it('contains valid data', async function () {
+        const data = await loadJSON(path.join(curatedFolder, file));
         const errors = validate(data);
         assert.strictEqual(errors, null, JSON.stringify(errors, null, 2));
       });
@@ -29,8 +32,8 @@
       const files = fs.readdirSync(folder);
       for (const file of files) {
         if (file.endsWith('.json')) {
-          it(`contains valid ${extractType} data in ${file}`, () => {
-            const data = require(path.join(folder, file));
+          it(`contains valid ${extractType} data in ${file}`, async () => {
+            const data = await loadJSON(path.join(folder, file));
             const errors = validate(data);
             assert.strictEqual(errors, null, JSON.stringify(errors, null, 2));
           });
diff --git a/tools/amend-event-data.js b/tools/amend-event-data.js
index 8610865577c7..9dee18d71c9c 100644
--- a/tools/amend-event-data.js
+++ b/tools/amend-event-data.js
@@ -10,10 +10,12 @@
  * and update (default is "curated")
  */
 
-const fs = require('fs').promises;
-const path = require('path');
-const loadJSON = require('./utils').loadJSON;
-const expandCrawlResult = require('reffy').expandCrawlResult;
+import fs from 'node:fs/promises';
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import { loadJSON } from './utils.js';
+import reffy from 'reffy';
+const expandCrawlResult = reffy.expandCrawlResult;
 
 const patches = {
   'IndexedDB-3': [
@@ -651,12 +653,12 @@ async function curateEvents(folder) {
 /**************************************************
 Export methods for use as module
 **************************************************/
-module.exports.curateEvents = curateEvents;
+export { curateEvents };
 
 /**************************************************
 Code run if the code is run as a stand-alone module
 **************************************************/
-if (require.main === module) {
+if (process.argv[1] === fileURLToPath(import.meta.url)) {
   const folder = process.argv[2] ?? 'curated';
 
   curateEvents(folder).catch(e => {
diff --git a/tools/apply-patches.js b/tools/apply-patches.js
index c85cce9b864e..8bc6b2f2ec24 100644
--- a/tools/apply-patches.js
+++ b/tools/apply-patches.js
@@ -14,11 +14,13 @@
  * (default is "all").
  */
 
-const fs = require('fs').promises;
-const path = require('path');
-const util = require('util');
-const execFile = util.promisify(require('child_process').execFile);
-const { createFolderIfNeeded } = require('./utils');
+import fs from 'node:fs/promises';
+import path from 'node:path';
+import util from 'node:util';
+import { fileURLToPath } from 'node:url';
+import { execFile as execCb } from 'node:child_process';
+import { createFolderIfNeeded } from './utils.js';
+const execFile = util.promisify(execCb);
 
 async function applyPatches(rawFolder, outputFolder, type) {
   type = (type === 'all') ? ['css', 'elements', 'idl'] : [type];
@@ -93,13 +95,13 @@ async function applyPatches(rawFolder, outputFolder, type) {
 
 /**************************************************
 Export methods for use as module
 **************************************************/
-module.exports.applyPatches = applyPatches;
+export { applyPatches };
 
 /**************************************************
 Code run if the code is run as a stand-alone module
 **************************************************/
-if (require.main === module) {
+if (process.argv[1] === fileURLToPath(import.meta.url)) {
   const rawFolder = process.argv[2] ?? 'ed';
   const outputFolder = process.argv[3] ?? 'curated';
   const type = process.argv[4] ?? 'all';
diff --git a/tools/bump-packages-minor.js b/tools/bump-packages-minor.js
index 5e70f13c92d5..1b897c3f99af 100644
--- a/tools/bump-packages-minor.js
+++ b/tools/bump-packages-minor.js
@@ -13,14 +13,16 @@
 * means a minor bump is already pending release.
  */
 
-const fs = require('fs').promises;
-const path = require('path');
-const { loadJSON } = require('./utils');
-const { execSync } = require('child_process');
+import fs from 'node:fs/promises';
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import { execSync } from 'node:child_process';
+import { loadJSON } from './utils.js';
 
+const scriptPath = path.dirname(fileURLToPath(import.meta.url));
 async function checkPackage(type) {
   console.log(`Check ${type} package`);
-  const packageFile = path.resolve(__dirname, '..', 'packages', type, 'package.json');
-  const package = await loadJSON(packageFile);
-  const version = package.version;
+  const packageFile = path.resolve(scriptPath, '..', 'packages', type, 'package.json');
+  // "package" is a reserved word in strict mode, and ESM code is always strict
+  const packageContents = await loadJSON(packageFile);
+  const version = packageContents.version;
   console.log(`- Current version: ${version}`);
@@ -56,7 +58,7 @@
 
 
 async function checkPackages() {
-  const packagesFolder = path.resolve(__dirname, '..', 'packages');
+  const packagesFolder = path.resolve(scriptPath, '..', 'packages');
   const types = await fs.readdir(packagesFolder);
   for (const type of types) {
     const stat = await fs.lstat(path.join(packagesFolder, type));
diff --git a/tools/clean-abandoned-files.js b/tools/clean-abandoned-files.js
index dfcb6b547fe3..6e458dad4db9 100644
--- a/tools/clean-abandoned-files.js
+++ b/tools/clean-abandoned-files.js
@@ -1,7 +1,7 @@
-const fs = require("fs");
+import fs from "node:fs";
 
-const ed = require("../ed/index.json");
-const tr = require("../tr/index.json");
+import ed from "../ed/index.json" with { type: 'json' };
+import tr from "../tr/index.json" with { type: 'json' };
 
 const removeExtension = f => {
   const components = f.split(".");
diff --git a/tools/clean-dropped-specs-files.js b/tools/clean-dropped-specs-files.js
index 15a0f390b280..2a080f857742 100644
--- a/tools/clean-dropped-specs-files.js
+++ b/tools/clean-dropped-specs-files.js
@@ -17,9 +17,9 @@
  * remain subject to human review.
  */
 
-const fs = require("fs").promises;
-const path = require("path");
-const { loadJSON } = require('./utils');
+import fs from "node:fs/promises";
+import path from "node:path";
+import { loadJSON } from './utils.js';
 
 async function cleanExtractFolder(folder, crawlResults) {
   const dir = await fs.readdir(folder);
diff --git a/tools/clean-patches.js b/tools/clean-patches.js
index 4fe34853ea46..3f11c93af086 100644
--- a/tools/clean-patches.js
+++ b/tools/clean-patches.js
@@ -3,11 +3,12 @@
  * a pull request to drop patches that should no longer be needed.
 */
 
-const core = require('@actions/core');
-const Octokit = require("./octokit");
-const fs = require("fs");
-const path = require("path");
-
+import fs from "node:fs";
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import core from '@actions/core';
+import Octokit from "./octokit.js";
+const scriptPath = path.dirname(fileURLToPath(import.meta.url));
 
 /**
  * Check GitHub issues and PR referenced by patch files and drop patch files
@@ -19,7 +20,7 @@
 * empty string when there are no patches to drop.
  */
 async function dropPatchesWhenPossible() {
-  const rootDir = path.join(__dirname, "..", "ed");
+  const rootDir = path.join(scriptPath, "..", "ed");
 
   console.log("Gather patch files");
   let patches = [];
diff --git a/tools/commit-curated.js b/tools/commit-curated.js
index 74f3bccff876..051afbb947a4 100644
--- a/tools/commit-curated.js
+++ b/tools/commit-curated.js
@@ -12,12 +12,12 @@
  * otherwise)
  */
 
-const util = require('util');
-const path = require('path');
-const fs = require('fs').promises;
-const { rimraf } = require('rimraf');
-const { execSync } = require('child_process');
-const { copyFolder, createFolderIfNeeded } = require('./utils');
+import util from 'node:util';
+import path from 'node:path';
+import fs from 'node:fs/promises';
+import { execSync } from 'node:child_process';
+import { rimraf } from 'rimraf';
+import { copyFolder, createFolderIfNeeded } from './utils.js';
 
 
 /**
diff --git a/tools/create-patch.js b/tools/create-patch.js
index 3826d181f67b..d528f2c56e3f 100644
--- a/tools/create-patch.js
+++ b/tools/create-patch.js
@@ -7,11 +7,12 @@
  * node tools/create-patch.js
 */
 
-const fs = require('fs/promises');
-const util = require('util');
-const path = require('path');
-const exec = util.promisify(require('child_process').exec);
-const execFile = util.promisify(require('child_process').execFile);
+import fs from 'node:fs/promises';
+import util from 'node:util';
+import path from 'node:path';
+import { exec as execCb, execFile as execFileCb } from 'node:child_process';
+const exec = util.promisify(execCb);
+const execFile = util.promisify(execFileCb);
 
 async function main() {
   console.log('Check last commit touches one and only one CSS/Elements/IDL file...');
diff --git a/tools/css-json-to-ttl.js b/tools/css-json-to-ttl.js
index aecf8407feda..555082c65029 100644
--- a/tools/css-json-to-ttl.js
+++ b/tools/css-json-to-ttl.js
@@ -1,6 +1,6 @@
 // Convert JSON on CVSS to Turtle for Css property namespace
 
-const cssData = require( '../ed/css/CSS.json')
+import cssData from '../ed/css/CSS.json' with { type: 'json' };
 
 // console.log(JSON.stringify(cssData).slice(0,100))
 
diff --git a/tools/drop-css-property-duplicates.js b/tools/drop-css-property-duplicates.js
index 548e591ec921..1548f0318c70 100644
--- a/tools/drop-css-property-duplicates.js
+++ b/tools/drop-css-property-duplicates.js
@@ -10,12 +10,15 @@
  * and update (default is "curated")
  */
 
-const fs = require('fs').promises;
-const path = require('path');
-const util = require('util');
-const execFile = util.promisify(require('child_process').execFile);
-const loadJSON = require('./utils').loadJSON;
-const expandCrawlResult = require('reffy').expandCrawlResult;
+import fs from 'node:fs/promises';
+import path from 'node:path';
+import util from 'node:util';
+import { fileURLToPath } from 'node:url';
+import { execFile as execFileCb } from 'node:child_process';
+import { loadJSON } from './utils.js';
+import reffy from 'reffy';
+const expandCrawlResult = reffy.expandCrawlResult;
+const execFile = util.promisify(execFileCb);
 
 
 /**
@@ -263,13 +266,13 @@ async function dropCSSPropertyDuplicates(folder) {
 
 /**************************************************
 Export methods for use as module
 **************************************************/
-module.exports.dropCSSPropertyDuplicates = dropCSSPropertyDuplicates;
+export { dropCSSPropertyDuplicates };
 
 /**************************************************
 Code run if the code is run as a stand-alone module
 **************************************************/
-if (require.main === module) {
+if (process.argv[1] === fileURLToPath(import.meta.url)) {
   const folder = process.argv[2] ?? 'curated';
 
   dropCSSPropertyDuplicates(folder).catch(e => {
diff --git a/tools/octokit.js b/tools/octokit.js
index 80de14ef627e..38266efd8e6b 100644
--- a/tools/octokit.js
+++ b/tools/octokit.js
@@ -2,12 +2,13 @@
  * Wrapper around Octokit to add throttling and avoid hitting rate limits
 */
 
-const { throttling } = require("@octokit/plugin-throttling");
-const Octokit = require("@octokit/rest").Octokit.plugin(throttling);
+import { throttling } from "@octokit/plugin-throttling";
+import { Octokit as rawOctokit } from "@octokit/rest";
+const Octokit = rawOctokit.plugin(throttling);
 
 const MAX_RETRIES = 3;
 
-module.exports = function (params) {
+export default function (params) {
   params = params || {};
 
   const octoParams = Object.assign({
diff --git a/tools/prepare-curated.js b/tools/prepare-curated.js
index 0228e25c085c..cc14398e263a 100644
--- a/tools/prepare-curated.js
+++ b/tools/prepare-curated.js
@@ -17,17 +17,18 @@
  * node tools/prepare-curated.js [raw data folder] [curated folder]
 */
 
-const fs = require('fs').promises;
-const path = require('path');
-const { rimraf } = require('rimraf');
-const { crawlSpecs } = require('reffy');
-const {
+import fs from 'node:fs/promises';
+import path from 'node:path';
+import { rimraf } from 'rimraf';
+import {
   createFolderIfNeeded,
   loadJSON,
-  copyFolder } = require('./utils');
-const { applyPatches } = require('./apply-patches');
-const { dropCSSPropertyDuplicates } = require('./drop-css-property-duplicates');
-const { curateEvents } = require('./amend-event-data');
+  copyFolder } from './utils.js';
+import { applyPatches } from './apply-patches.js';
+import { dropCSSPropertyDuplicates } from './drop-css-property-duplicates.js';
+import { curateEvents } from './amend-event-data.js';
+import reffy from 'reffy';
+const crawlSpecs = reffy.crawlSpecs;
 
 
 /**
diff --git a/tools/prepare-packages.js b/tools/prepare-packages.js
index 5648b46e72ad..8971e4fd5447 100644
--- a/tools/prepare-packages.js
+++ b/tools/prepare-packages.js
@@ -14,11 +14,12 @@
 */
 
 
-const fs = require('fs').promises;
-const path = require('path');
-const util = require('util');
-const execFile = util.promisify(require('child_process').execFile);
-const { loadJSON } = require('./utils');
+import fs from 'node:fs/promises';
+import path from 'node:path';
+import util from 'node:util';
+import { execFile as execFileCb } from 'node:child_process';
+import { loadJSON } from './utils.js';
+const execFile = util.promisify(execFileCb);
 
 async function preparePackages(curatedFolder, packagesFolder) {
   console.log('Load crawl index file');
diff --git a/tools/prepare-release.js b/tools/prepare-release.js
index 64a7d9ec2e19..735019b78b37 100644
--- a/tools/prepare-release.js
+++ b/tools/prepare-release.js
@@ -23,12 +23,12 @@
 * - Invalidate or re-request review when PR is updated?
  */
 
-const Octokit = require("./octokit");
-const fs = require("fs");
-const path = require("path");
-const os = require("os");
-const { execSync } = require("child_process");
-const { rimraf } = require("rimraf");
+import Octokit from "./octokit.js";
+import fs from "node:fs";
+import path from "node:path";
+import os from "node:os";
+import { execSync } from "node:child_process";
+import { rimraf } from "rimraf";
 
 // Repository to process
 const owner = "w3c";
diff --git a/tools/release-package.js b/tools/release-package.js
index 417369a893c4..0e552e3fb626 100644
--- a/tools/release-package.js
+++ b/tools/release-package.js
@@ -3,13 +3,13 @@
  * PR is based.
 */
 
-const Octokit = require("./octokit");
-const fs = require("fs");
-const path = require("path");
-const os = require("os");
-const { execSync } = require("child_process");
-const { rimraf } = require("rimraf");
-const { npmPublish } = require("@jsdevtools/npm-publish");
+import Octokit from "./octokit.js";
+import fs from "node:fs";
+import path from "node:path";
+import os from "node:os";
+import { execSync } from "node:child_process";
+import { rimraf } from "rimraf";
+import { npmPublish } from "@jsdevtools/npm-publish";
 
 const owner = "w3c";
 const repo = "webref";
diff --git a/tools/request-pr-review.js b/tools/request-pr-review.js
index a25b8fc89fa1..c1cc7c9504f7 100644
--- a/tools/request-pr-review.js
+++ b/tools/request-pr-review.js
@@ -2,7 +2,7 @@
  * Request a review on a pending pre-release PR
 */
 
-const Octokit = require("./octokit");
+import Octokit from "./octokit.js";
 
 // Repository to process and PR reviewers
 const owner = "w3c";
diff --git a/tools/utils.js b/tools/utils.js
index 6ccaa92bfa25..7b942e062ccd 100644
--- a/tools/utils.js
+++ b/tools/utils.js
@@ -2,8 +2,8 @@
  * Common functions for use in tools
 */
 
-const fs = require('fs').promises;
-const path = require('path');
+import fs from 'node:fs/promises';
+import path from 'node:path';
 
 async function createFolderIfNeeded(folder) {
   try {
@@ -68,7 +68,7 @@ async function copyFolder(source, target, { excludeRoot = false } = {}) {
 };
 
 
-module.exports = {
+export {
   createFolderIfNeeded,
   loadJSON,
   copyFolder
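
Reviewer note: the same CommonJS-to-ESM conversion recipe is applied to every
script above. As a quick reference, here is a minimal sketch of the recipe
(hypothetical module; `doSomething` and `data.json` are placeholders, not
files from this repository):

    // Before (CommonJS)
    const path = require('path');
    const { loadJSON } = require('./utils');
    const data = require('./data.json');
    const folder = path.join(__dirname, '..', 'curated');
    function doSomething() { return loadJSON(path.join(folder, 'index.json')); }
    module.exports = { doSomething };
    if (require.main === module) { doSomething(); }

    // After (ESM, with "type": "module" set in package.json)
    import path from 'node:path';
    import { fileURLToPath } from 'node:url';
    // Relative imports now need an explicit file extension
    import { loadJSON } from './utils.js';
    // JSON files can no longer be require()'d; use an import attribute instead
    import data from './data.json' with { type: 'json' };
    // __dirname does not exist in ESM; derive it from import.meta.url
    const scriptPath = path.dirname(fileURLToPath(import.meta.url));
    const folder = path.join(scriptPath, '..', 'curated');
    function doSomething() { return loadJSON(path.join(folder, 'index.json')); }
    export { doSomething };
    // ESM replacement for the "require.main === module" main-module check
    if (process.argv[1] === fileURLToPath(import.meta.url)) { doSomething(); }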