Switch to ECMAScript modules (#1298)
This should also make it possible to bump dependencies that have switched to
ES modules and can no longer be imported as CommonJS modules.
tidoust authored Jul 24, 2024
1 parent 3f412d9 commit ce5842b
Showing 28 changed files with 174 additions and 140 deletions.
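The first change below, adding "type": "module" to package.json, is the switch itself: it makes Node.js treat every .js file in the repository as an ES module. The remaining files are the mechanical rewrite that follows from it. As a rough sketch of the recurring before/after pattern (illustrative only, not lifted from any one file):

```js
// CommonJS (before):
//   const assert = require('assert').strict;
//   const path = require('path');
//   module.exports.curate = curate;
//
// ESM (after), valid once package.json declares "type": "module":
import { strict as assert } from 'node:assert';
import path from 'node:path';

export function curate(folder) {
  assert.equal(typeof folder, 'string');
  return path.resolve(folder);
}
```

Note also that relative imports now spell out the .js extension (./utils becomes ./utils.js), since the ESM resolver does not append extensions automatically.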
1 change: 1 addition & 0 deletions package.json
@@ -23,6 +23,7 @@
"engines": {
"node": ">=20"
},
"type": "module",
"devDependencies": {
"@actions/core": "1.10.1",
"@jsdevtools/npm-publish": "3.1.1",
14 changes: 8 additions & 6 deletions test/css/all.js
@@ -7,13 +7,15 @@
* data because that view is a strict subset of the curated view.
*/

const assert = require('assert').strict;
const path = require('path');
const css = require('@webref/css');
const index = require('../../curated/index.json');
const { definitionSyntax } = require('css-tree');
import { strict as assert } from 'node:assert';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import css from '@webref/css';
import index from '../../curated/index.json' with { type: 'json' };
import { definitionSyntax } from 'css-tree';

const curatedFolder = path.join(__dirname, '..', '..', 'curated', 'css');
const scriptPath = path.dirname(fileURLToPath(import.meta.url));
const curatedFolder = path.join(scriptPath, '..', '..', 'curated', 'css');

// Expected content in CSS extracts
const cssValues = [
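ES modules do not define __dirname, so each test that built paths from it now derives the script directory from import.meta.url, as in the hunk above. The pattern in isolation:

```js
import path from 'node:path';
import { fileURLToPath } from 'node:url';

// import.meta.url is a file:// URL for the current module; converting it to a
// filesystem path and taking its dirname yields the value __dirname used to hold.
const scriptPath = path.dirname(fileURLToPath(import.meta.url));
const curatedFolder = path.join(scriptPath, '..', '..', 'curated', 'css');
console.log(curatedFolder);
```

Recent Node.js releases also expose import.meta.dirname directly, but the fileURLToPath form works on any ESM-capable version.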
6 changes: 3 additions & 3 deletions test/css/package.js
@@ -1,7 +1,7 @@
const assert = require('assert').strict;
import { strict as assert } from 'node:assert';

const cssPackage = require('../../packages/css/package.json');
const rootPackage = require('../../package.json');
import cssPackage from '../../packages/css/package.json' with { type: 'json' };
import rootPackage from '../../package.json' with { type: 'json' };

describe('The @webref/css package', () => {
it('uses the same version of css-tree as main package', () => {
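require() could load JSON files directly; for JSON whose path is known statically, the ESM equivalent used above is a JSON module import with an import attribute. A small sketch, with an illustrative path that assumes a package.json sits next to the module (depending on the Node.js version, JSON modules may still print an experimental warning):

```js
// The attribute tells Node.js to parse the imported file as JSON rather than JavaScript.
import pkg from './package.json' with { type: 'json' };

console.log(pkg.name, pkg.version);
```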
10 changes: 6 additions & 4 deletions test/elements/all.js
@@ -7,11 +7,13 @@
* the data because that view is a strict subset of the curated view.
*/

const assert = require('assert').strict;
const path = require('path');
const elements = require('@webref/elements');
import { strict as assert } from 'node:assert';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import elements from '@webref/elements';

const curatedFolder = path.join(__dirname, '..', '..', 'curated', 'elements');
const scriptPath = path.dirname(fileURLToPath(import.meta.url));
const curatedFolder = path.join(scriptPath, '..', '..', 'curated', 'elements');

describe('The curated view of elements extracts', function () {
it('contains valid JSON and expected properties', async function () {
14 changes: 8 additions & 6 deletions test/elements/consistency.js
@@ -6,19 +6,21 @@
* view because of some missing IDL definition in that view.
*/

const assert = require('assert').strict;
const path = require('path');
const elements = require('@webref/elements');
const idl = require('@webref/idl');
import { strict as assert } from 'node:assert';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import elements from '@webref/elements';
import idl from '@webref/idl';

const scriptPath = path.dirname(fileURLToPath(import.meta.url));
const views = [
{
name: 'curated',
folder: path.join(__dirname, '..', '..', 'curated')
folder: path.join(scriptPath, '..', '..', 'curated')
},
{
name: '@webref/elements package',
folder: path.join(__dirname, '..', '..', 'packages')
folder: path.join(scriptPath, '..', '..', 'packages')
}
];

15 changes: 9 additions & 6 deletions test/events/all.js
@@ -7,13 +7,16 @@
* the data because that view is a strict subset of the curated view.
*/

const assert = require('assert').strict;
const path = require('path');
const events = require('@webref/events');
const idl = require('@webref/idl');
const { getInterfaceTreeInfo } = require('reffy');
import { strict as assert } from 'node:assert';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import events from '@webref/events';
import idl from '@webref/idl';
import reffy from 'reffy';
const getInterfaceTreeInfo = reffy.getInterfaceTreeInfo;

const curatedFolder = path.join(__dirname, '..', '..', 'curated');
const scriptPath = path.dirname(fileURLToPath(import.meta.url));
const curatedFolder = path.join(scriptPath, '..', '..', 'curated');

let allEvents = null;
const interfaces = new Set();
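Note the two-step import of getInterfaceTreeInfo in this file: the package's default export is imported and the function is read off it afterwards. That is the usual workaround when a dependency is consumed as CommonJS and its named exports are not statically visible to the ESM loader; whether that is the actual reason for reffy here is an assumption, since reffy's own module format is not part of this diff. The pattern in isolation, assuming reffy is installed as in this repository:

```js
// Import the package's default export, then pick functions off it at runtime.
// This works both for CommonJS packages and for ESM packages with a default export.
import reffy from 'reffy';

const { getInterfaceTreeInfo, getSchemaValidationFunction } = reffy;
console.log(typeof getInterfaceTreeInfo, typeof getSchemaValidationFunction);
```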
10 changes: 6 additions & 4 deletions test/idl/all.js
@@ -7,11 +7,13 @@
* data because that view is a strict subset of the curated view.
*/

const assert = require('assert').strict;
const path = require('path');
const idl = require('@webref/idl');
import { strict as assert } from 'node:assert';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import idl from '@webref/idl';

const curatedFolder = path.join(__dirname, '..', '..', 'curated', 'idl');
const scriptPath = path.dirname(fileURLToPath(import.meta.url));
const curatedFolder = path.join(scriptPath, '..', '..', 'curated', 'idl');

describe('The curated view of Web IDL extracts', function () {
this.slow(5000);
15 changes: 9 additions & 6 deletions test/idl/consistency.js
@@ -10,19 +10,22 @@
* view because of some missing IDL definition in that view.
*/

const assert = require('assert').strict;
const path = require('path');
const idl = require('@webref/idl');
const { studyWebIdl } = require('strudy');
import { strict as assert } from 'node:assert';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import idl from '@webref/idl';
import strudy from 'strudy';
const studyWebIdl = strudy.studyWebIdl;

const scriptPath = path.dirname(fileURLToPath(import.meta.url));
const views = [
{
name: 'curated',
folder: path.join(__dirname, '..', '..', 'curated', 'idl')
folder: path.join(scriptPath, '..', '..', 'curated', 'idl')
},
{
name: '@webref/idl package',
folder: path.join(__dirname, '..', '..', 'packages', 'idl')
folder: path.join(scriptPath, '..', '..', 'packages', 'idl')
}
];

6 changes: 3 additions & 3 deletions test/idl/package.js
@@ -1,7 +1,7 @@
const assert = require('assert').strict;
import { strict as assert } from 'node:assert';

const idlPackage = require('../../packages/idl/package.json');
const rootPackage = require('../../package.json');
import idlPackage from '../../packages/idl/package.json' with { type: 'json' };
import rootPackage from '../../package.json' with { type: 'json' };

describe('The @webref/idl package', () => {
it('uses the same version of webidl2.js as main package', () => {
16 changes: 9 additions & 7 deletions test/idl/validate.js
@@ -9,24 +9,26 @@
* the package view, e.g. due to missing base interfaces.
*/

const assert = require('assert').strict;
const path = require('path');
const WebIDL2 = require('webidl2');
const idl = require('@webref/idl');
import { strict as assert } from 'node:assert';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { validate as validateWebIdl } from 'webidl2';
import idl from '@webref/idl';

const scriptPath = path.dirname(fileURLToPath(import.meta.url));
const curatedView = {
name: 'curated',
folder: path.join(__dirname, '..', '..', 'curated', 'idl')
folder: path.join(scriptPath, '..', '..', 'curated', 'idl')
};
const packageView = {
name: '@webref/idl package',
folder: path.join(__dirname, '..', '..', 'packages', 'idl')
folder: path.join(scriptPath, '..', '..', 'packages', 'idl')
};

// Wrapper around the WebIDL2.js validation function to ignore
// [LegacyNoInterfaceObject] "errors".
function validate(ast) {
const validations = WebIDL2.validate(ast).filter(v => {
const validations = validateWebIdl(ast).filter(v => {
return v.ruleName !== 'no-nointerfaceobject';
});
if (!validations.length) {
23 changes: 13 additions & 10 deletions test/schemas.js
@@ -4,19 +4,22 @@
* The tests run against the curated view of the extracts.
*/

const fs = require('fs');
const path = require('path');
const assert = require('assert').strict;
const { getSchemaValidationFunction } = require('reffy');
import { strict as assert } from 'node:assert';
import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import reffy from 'reffy';
import { loadJSON } from '../tools/utils.js';

const curatedFolder = path.join(__dirname, '..', 'curated');
const scriptPath = path.dirname(fileURLToPath(import.meta.url));
const curatedFolder = path.join(scriptPath, '..', 'curated');
const files = fs.readdirSync(curatedFolder);
for (const file of files) {
const validate = getSchemaValidationFunction(file);
const validate = reffy.getSchemaValidationFunction(file);
if (file.endsWith('.json')) {
describe(`The ${file} file`, function () {
it('contains valid data', function () {
const data = require(path.join(curatedFolder, file));
it('contains valid data', async function () {
const data = await loadJSON(path.join(curatedFolder, file));
const errors = validate(data);
assert.strictEqual(errors, null, JSON.stringify(errors, null, 2));
});
@@ -29,8 +32,8 @@ for (const file of files) {
const files = fs.readdirSync(folder);
for (const file of files) {
if (file.endsWith('.json')) {
it(`contains valid ${extractType} data in ${file}`, () => {
const data = require(path.join(folder, file));
it(`contains valid ${extractType} data in ${file}`, async () => {
const data = await loadJSON(path.join(folder, file));
const errors = validate(data);
assert.strictEqual(errors, null, JSON.stringify(errors, null, 2));
});
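These tests used to load JSON extracts with require(path), which is not available in ES modules for paths computed at runtime, so they now go through a loadJSON helper imported from ../tools/utils.js. That helper is not part of this diff; the sketch below is only a guess at its shape, not the repository's actual implementation:

```js
import fs from 'node:fs/promises';

// Hypothetical stand-in for the loadJSON helper in tools/utils.js:
// read the file and parse its contents as JSON.
export async function loadJSON(filename) {
  const contents = await fs.readFile(filename, 'utf8');
  return JSON.parse(contents);
}
```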
14 changes: 8 additions & 6 deletions tools/amend-event-data.js
@@ -10,10 +10,12 @@
* and update (default is "curated")
*/

const fs = require('fs').promises;
const path = require('path');
const loadJSON = require('./utils').loadJSON;
const expandCrawlResult = require('reffy').expandCrawlResult;
import fs from 'node:fs/promises';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { loadJSON } from './utils.js';
import reffy from 'reffy';
const expandCrawlResult = reffy.expandCrawlResult;

const patches = {
'IndexedDB-3': [
@@ -651,12 +653,12 @@ async function curateEvents(folder) {
/**************************************************
Export methods for use as module
**************************************************/
module.exports.curateEvents = curateEvents;
export { curateEvents };

/**************************************************
Code run if the code is run as a stand-alone module
**************************************************/
if (require.main === module) {
if (process.argv[1] === fileURLToPath(import.meta.url)) {
const folder = process.argv[2] ?? 'curated';

curateEvents(folder).catch(e => {
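The require.main === module check used to detect stand-alone invocation has no ESM counterpart, so the tools now compare the script path Node.js was started with against the module's own resolved path, as in the hunk above. In isolation:

```js
import { fileURLToPath } from 'node:url';

// process.argv[1] is the script Node.js was asked to run; when it matches this
// module's own path, the module is being executed directly rather than imported.
const runAsMain = process.argv[1] === fileURLToPath(import.meta.url);

if (runAsMain) {
  console.log('Running as a stand-alone script');
}
```

One caveat: the two strings can differ when the script is launched through a symlink or without its file extension, which presumably is not how these tools are invoked.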
16 changes: 9 additions & 7 deletions tools/apply-patches.js
@@ -14,11 +14,13 @@
* (default is "all").
*/

const fs = require('fs').promises;
const path = require('path');
const util = require('util');
const execFile = util.promisify(require('child_process').execFile);
const { createFolderIfNeeded } = require('./utils');
import fs from 'node:fs/promises';
import path from 'node:path';
import util from 'node:util';
import { fileURLToPath } from 'node:url';
import { execFile as execCb } from 'node:child_process';
import { createFolderIfNeeded } from './utils.js';
const execFile = util.promisify(execCb);

async function applyPatches(rawFolder, outputFolder, type) {
type = (type === 'all') ? ['css', 'elements', 'idl'] : [type];
@@ -93,13 +95,13 @@ async function applyPatches(rawFolder, outputFolder, type) {
/**************************************************
Export methods for use as module
**************************************************/
module.exports.applyPatches = applyPatches;
export { applyPatches };


/**************************************************
Code run if the code is run as a stand-alone module
**************************************************/
if (require.main === module) {
if (process.argv[1] === fileURLToPath(import.meta.url)) {
const rawFolder = process.argv[2] ?? 'ed';
const outputFolder = process.argv[3] ?? 'curated';
const type = process.argv[4] ?? 'all';
14 changes: 8 additions & 6 deletions tools/bump-packages-minor.js
@@ -13,14 +13,16 @@
* means a minor bump is already pending release.
*/

const fs = require('fs').promises;
const path = require('path');
const { loadJSON } = require('./utils');
const { execSync } = require('child_process');
import fs from 'node:fs/promises';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { execSync } from 'node:child_process';
import { loadJSON } from './utils.js';
const scriptPath = path.dirname(fileURLToPath(import.meta.url));

async function checkPackage(type) {
console.log(`Check ${type} package`);
const packageFile = path.resolve(__dirname, '..', 'packages', type, 'package.json');
const packageFile = path.resolve(scriptPath, '..', 'packages', type, 'package.json');
const package = await loadJSON(packageFile);
const version = package.version;
console.log(`- Current version: ${version}`);
@@ -56,7 +58,7 @@ async function checkPackage(type) {


async function checkPackages() {
const packagesFolder = path.resolve(__dirname, '..', 'packages');
const packagesFolder = path.resolve(scriptPath, '..', 'packages');
const types = await fs.readdir(packagesFolder);
for (const type of types) {
const stat = await fs.lstat(path.join(packagesFolder, type));
6 changes: 3 additions & 3 deletions tools/clean-abandoned-files.js
@@ -1,7 +1,7 @@
const fs = require("fs");
import fs from "node:fs";

const ed = require("../ed/index.json");
const tr = require("../tr/index.json");
import ed from "../ed/index.json" with { type: 'json' };
import tr from "../tr/index.json" with { type: 'json' };

const removeExtension = f => {
const components = f.split(".");
6 changes: 3 additions & 3 deletions tools/clean-dropped-specs-files.js
@@ -17,9 +17,9 @@
* remain subject to human review.
*/

const fs = require("fs").promises;
const path = require("path");
const { loadJSON } = require('./utils');
import fs from "node:fs/promises";
import path from "node:path";
import { loadJSON } from './utils.js';

async function cleanExtractFolder(folder, crawlResults) {
const dir = await fs.readdir(folder);
13 changes: 7 additions & 6 deletions tools/clean-patches.js
@@ -3,11 +3,12 @@
* a pull request to drop patches that should no longer be needed.
*/

const core = require('@actions/core');
const Octokit = require("./octokit");
const fs = require("fs");
const path = require("path");

import fs from "node:fs";
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import core from '@actions/core';
import Octokit from "./octokit.js";
const scriptPath = path.dirname(fileURLToPath(import.meta.url));

/**
* Check GitHub issues and PR referenced by patch files and drop patch files
@@ -19,7 +20,7 @@ const path = require("path");
* empty string when there are no patches to drop.
*/
async function dropPatchesWhenPossible() {
const rootDir = path.join(__dirname, "..", "ed");
const rootDir = path.join(scriptPath, "..", "ed");

console.log("Gather patch files");
let patches = [];