diff --git a/.changeset/blue-donkeys-pretend.md b/.changeset/blue-donkeys-pretend.md new file mode 100644 index 000000000000..c4a4861fd2a7 --- /dev/null +++ b/.changeset/blue-donkeys-pretend.md @@ -0,0 +1,11 @@ +--- +"wrangler": patch +--- + +fix: ensure that additional modules appear in the out-dir + +When using `find_additional_modules` (or `no_bundle`), we find files that +will be uploaded and deployed alongside the Worker. + +Previously, if an `outDir` was specified, only the Worker code was output +to this directory. Now all additional modules are output there as well. diff --git a/.changeset/clever-turkeys-leave.md b/.changeset/clever-turkeys-leave.md new file mode 100644 index 000000000000..ed5274219f0f --- /dev/null +++ b/.changeset/clever-turkeys-leave.md @@ -0,0 +1,20 @@ +--- +"wrangler": minor +--- + +feat: support partial bundling with configurable external modules + +Setting `find_additional_modules` to `true` in your configuration file will now instruct Wrangler to look for files in +your `base_dir` that match your configured `rules`, and deploy them as unbundled, external modules with your Worker. +`base_dir` defaults to the directory containing your `main` entrypoint. + +Wrangler can operate in two modes: the default bundling mode and `--no-bundle` mode. In bundling mode, dynamic imports +(e.g. `await import("./large-dep.mjs")`) would be bundled into your entrypoint, making lazy loading less effective. +Additionally, variable dynamic imports (e.g. `` await import(`./lang/${language}.mjs`) ``) would always fail at runtime, +as Wrangler would have no way of knowing which modules to upload. The `--no-bundle` mode sought to address these issues +by disabling Wrangler's bundling entirely and just deploying code as is. Unfortunately, this also disabled Wrangler's +code transformations (e.g. TypeScript compilation, `--assets`, `--test-scheduled`, etc.). + +With this change, we now additionally support _partial bundling_. Files are bundled into a single Worker entry-point file +unless `find_additional_modules` is `true` and the file matches one of the configured `rules`. See +https://developers.cloudflare.com/workers/wrangler/bundling/ for more details and examples. diff --git a/.changeset/hot-deers-return.md b/.changeset/hot-deers-return.md new file mode 100644 index 000000000000..356a68640efb --- /dev/null +++ b/.changeset/hot-deers-return.md @@ -0,0 +1,8 @@ +--- +"wrangler": patch +--- + +fix: allow `__STATIC_CONTENT_MANIFEST` module to be imported anywhere + +`__STATIC_CONTENT_MANIFEST` can now be imported in subdirectories when +`--no-bundle` or `find_additional_modules` is enabled.
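To make the partial bundling behaviour concrete, here is a minimal sketch of the Worker-side pattern it enables, condensed from the `fixtures/additional-modules` fixture added later in this diff. The `wrangler.toml` settings in the leading comment are abridged from that fixture, and the specific file names are illustrative only.

```ts
// wrangler.toml (abridged from the fixture later in this diff):
//   find_additional_modules = true
//   rules = [{ type = "ESModule", globs = ["**/*.js"] }]
export default {
	async fetch(request: Request): Promise<Response> {
		const url = new URL(request.url);
		if (url.pathname.startsWith("/lang/")) {
			// Because the path is built dynamically, esbuild cannot inline it.
			// With find_additional_modules, every file matching the ESModule rule
			// is uploaded alongside the Worker, so this import resolves at runtime.
			const language =
				"./lang/" + url.pathname.substring("/lang/".length) + ".js";
			return new Response((await import(language)).default.hello);
		}
		return new Response("Not Found", { status: 404 });
	},
};
```

In plain bundling mode this variable import would fail at runtime, because Wrangler would have no way of knowing which `./lang/*.js` files to upload; with partial bundling those files are deployed as unbundled modules and loaded lazily.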
diff --git a/.prettierignore b/.prettierignore index 6eaf10852cc4..ec4881ff9a67 100644 --- a/.prettierignore +++ b/.prettierignore @@ -7,6 +7,9 @@ packages/wrangler/CHANGELOG.md packages/jest-environment-wrangler/CHANGELOG.md packages/wranglerjs-compat-webpack-plugin/lib packages/wrangler-devtools/built-devtools +packages/wrangler-devtools/.cipd +packages/wrangler-devtools/depot +packages/wrangler-devtools/devtools-frontend packages/edge-preview-authenticated-proxy/package.json packages/format-errors/package.json packages/**/dist/** diff --git a/fixtures/additional-modules/package.json b/fixtures/additional-modules/package.json new file mode 100644 index 000000000000..eaa994372e27 --- /dev/null +++ b/fixtures/additional-modules/package.json @@ -0,0 +1,21 @@ +{ + "name": "additional-modules", + "version": "0.0.1", + "private": true, + "scripts": { + "build": "wrangler deploy --dry-run --outdir=dist", + "check:type": "tsc", + "deploy": "wrangler deploy", + "start": "wrangler dev", + "test": "vitest run", + "test:ci": "vitest run", + "test:watch": "vitest", + "type:tests": "tsc -p ./test/tsconfig.json" + }, + "devDependencies": { + "@cloudflare/workers-tsconfig": "workspace:*", + "@cloudflare/workers-types": "^4.20230724.0", + "undici": "^5.9.1", + "wrangler": "workspace:*" + } +} diff --git a/fixtures/additional-modules/src/common.cjs b/fixtures/additional-modules/src/common.cjs new file mode 100644 index 000000000000..19658c121aad --- /dev/null +++ b/fixtures/additional-modules/src/common.cjs @@ -0,0 +1 @@ +module.exports = "common"; diff --git a/fixtures/additional-modules/src/dep.ts b/fixtures/additional-modules/src/dep.ts new file mode 100644 index 000000000000..cc0be2aa49e2 --- /dev/null +++ b/fixtures/additional-modules/src/dep.ts @@ -0,0 +1 @@ +export default "bundled"; diff --git a/fixtures/additional-modules/src/dynamic.js b/fixtures/additional-modules/src/dynamic.js new file mode 100644 index 000000000000..6c40343ed973 --- /dev/null +++ b/fixtures/additional-modules/src/dynamic.js @@ -0,0 +1 @@ +export default "dynamic"; diff --git a/fixtures/additional-modules/src/index.ts b/fixtures/additional-modules/src/index.ts new file mode 100644 index 000000000000..ad2d46ff01f5 --- /dev/null +++ b/fixtures/additional-modules/src/index.ts @@ -0,0 +1,28 @@ +import dep from "./dep"; +import text from "./text.txt"; +import common from "./common.cjs"; + +export default { + async fetch(request) { + const url = new URL(request.url); + if (url.pathname === "/dep") { + return new Response(dep); + } + if (url.pathname === "/text") { + return new Response(text); + } + if (url.pathname === "/common") { + return new Response(common); + } + if (url.pathname === "/dynamic") { + return new Response((await import("./dynamic.js")).default); + } + if (url.pathname.startsWith("/lang/")) { + // Build the path dynamically to ensure esbuild doesn't inline the import. 
+ const language = + "./lang/" + url.pathname.substring("/lang/".length) + ".js"; + return new Response((await import(language)).default.hello); + } + return new Response("Not Found", { status: 404 }); + }, +}; diff --git a/fixtures/additional-modules/src/lang/en.js b/fixtures/additional-modules/src/lang/en.js new file mode 100644 index 000000000000..969f5b911254 --- /dev/null +++ b/fixtures/additional-modules/src/lang/en.js @@ -0,0 +1 @@ +export default { hello: "hello" }; diff --git a/fixtures/additional-modules/src/lang/fr.js b/fixtures/additional-modules/src/lang/fr.js new file mode 100644 index 000000000000..67e5320f2947 --- /dev/null +++ b/fixtures/additional-modules/src/lang/fr.js @@ -0,0 +1 @@ +export default { hello: "bonjour" }; diff --git a/fixtures/additional-modules/src/text.d.ts b/fixtures/additional-modules/src/text.d.ts new file mode 100644 index 000000000000..4695e499251e --- /dev/null +++ b/fixtures/additional-modules/src/text.d.ts @@ -0,0 +1,4 @@ +declare module "*.txt" { + const value: string; + export default value; +} diff --git a/fixtures/additional-modules/src/text.txt b/fixtures/additional-modules/src/text.txt new file mode 100644 index 000000000000..9daeafb9864c --- /dev/null +++ b/fixtures/additional-modules/src/text.txt @@ -0,0 +1 @@ +test diff --git a/fixtures/additional-modules/test/index.test.ts b/fixtures/additional-modules/test/index.test.ts new file mode 100644 index 000000000000..4f9c7aaf1a12 --- /dev/null +++ b/fixtures/additional-modules/test/index.test.ts @@ -0,0 +1,239 @@ +import assert from "node:assert"; +import childProcess from "node:child_process"; +import { existsSync } from "node:fs"; +import fs from "node:fs/promises"; +import os from "node:os"; +import path from "node:path"; +import { + runWranglerDev, + wranglerEntryPath, +} from "../../shared/src/run-wrangler-long-lived"; +import { describe, beforeAll, afterAll, expect, test } from "vitest"; +import { setTimeout } from "node:timers/promises"; +import { fetch } from "undici"; + +async function getTmpDir() { + return fs.mkdtemp(path.join(os.tmpdir(), "wrangler-modules-")); +} + +type WranglerDev = Awaited<ReturnType<typeof runWranglerDev>>; +function get(worker: WranglerDev, pathname: string) { + const url = `http://${worker.ip}:${worker.port}${pathname}`; + // Setting the `MF-Original-URL` header will make Miniflare think this is + // coming from a `dispatchFetch()` request, meaning it won't return the pretty + // error page, and we'll be able to parse errors as JSON.
+ return fetch(url, { headers: { "MF-Original-URL": url } }); +} + +async function retry(closure: () => Promise, max = 30): Promise { + for (let attempt = 1; attempt <= max; attempt++) { + try { + return await closure(); + } catch (e) { + if (attempt === max) throw e; + } + await setTimeout(1_000); + } + assert.fail("Unreachable"); +} + +describe("find_additional_modules dev", () => { + let tmpDir: string; + let worker: WranglerDev; + + beforeAll(async () => { + // Copy over files to a temporary directory as we'll be modifying them + tmpDir = await getTmpDir(); + await fs.cp( + path.resolve(__dirname, "..", "src"), + path.join(tmpDir, "src"), + { recursive: true } + ); + await fs.cp( + path.resolve(__dirname, "..", "wrangler.toml"), + path.join(tmpDir, "wrangler.toml") + ); + + worker = await runWranglerDev(tmpDir, ["--port=0"]); + }); + afterAll(async () => { + await worker.stop(); + try { + await fs.rm(tmpDir, { recursive: true, force: true }); + } catch (e) { + // It seems that Windows doesn't let us delete this, with errors like: + // + // Error: EBUSY: resource busy or locked, rmdir 'C:\Users\RUNNER~1\AppData\Local\Temp\wrangler-modules-pKJ7OQ' + // ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ + // Serialized Error: { + // "code": "EBUSY", + // "errno": -4082, + // "path": "C:\Users\RUNNER~1\AppData\Local\Temp\wrangler-modules-pKJ7OQ", + // "syscall": "rmdir", + // } + console.error(e); + } + }); + + test("supports bundled modules", async () => { + const res = await get(worker, "/dep"); + expect(await res.text()).toBe("bundled"); + }); + test("supports text modules", async () => { + const res = await get(worker, "/text"); + expect(await res.text()).toBe("test\n"); + }); + test("supports dynamic imports", async () => { + const res = await get(worker, "/dynamic"); + expect(await res.text()).toBe("dynamic"); + }); + test("supports commonjs lazy imports", async () => { + const res = await get(worker, "/common"); + expect(await res.text()).toBe("common"); + }); + test("supports variable dynamic imports", async () => { + const res = await get(worker, "/lang/en"); + expect(await res.text()).toBe("hello"); + }); + + test("watches additional modules", async () => { + const srcDir = path.join(tmpDir, "src"); + + // Update dynamically imported file + await fs.writeFile( + path.join(srcDir, "dynamic.js"), + 'export default "new dynamic";' + ); + await retry(async () => { + const res = await get(worker, "/dynamic"); + assert.strictEqual(await res.text(), "new dynamic"); + }); + + // Delete dynamically imported file + await fs.rm(path.join(srcDir, "lang", "en.js")); + const res = await retry(async () => { + const res = await get(worker, "/lang/en"); + assert.strictEqual(res.status, 500); + return res; + }); + const error = (await res.json()) as { message?: string }; + expect(error.message).toBe('No such module "lang/en.js".'); + + // Create new dynamically imported file in new directory + await fs.mkdir(path.join(srcDir, "lang", "en")); + await fs.writeFile( + path.join(srcDir, "lang", "en", "us.js"), + 'export default { hello: "hey" };' + ); + await retry(async () => { + const res = await get(worker, "/lang/en/us"); + assert.strictEqual(await res.text(), "hey"); + }); + + // Update newly created file + await fs.writeFile( + path.join(srcDir, "lang", "en", "us.js"), + 'export default { hello: "bye" };' + ); + await retry(async () => { + const res = await get(worker, "/lang/en/us"); + assert.strictEqual(await res.text(), "bye"); + }); + }); +}); + +function build(cwd: string, outDir: string) { + return 
childProcess.spawnSync( + process.execPath, + [wranglerEntryPath, "deploy", "--dry-run", `--outdir=${outDir}`], + { cwd } + ); +} + +describe("find_additional_modules deploy", () => { + let tmpDir: string; + beforeAll(async () => { + tmpDir = await getTmpDir(); + }); + afterAll(async () => { + await fs.rm(tmpDir, { recursive: true, force: true }); + }); + + test("doesn't bundle additional modules", async () => { + const outDir = path.join(tmpDir, "out"); + const result = await build(path.resolve(__dirname, ".."), outDir); + expect(result.status).toBe(0); + + // Check additional modules marked external, but other dependencies bundled + const bundledEntryPath = path.join(outDir, "index.js"); + const bundledEntry = await fs.readFile(bundledEntryPath, "utf8"); + expect(bundledEntry).toMatchInlineSnapshot(` + "// src/dep.ts + var dep_default = \\"bundled\\"; + + // src/index.ts + import text from \\"./text.txt\\"; + import common from \\"./common.cjs\\"; + var src_default = { + async fetch(request) { + const url = new URL(request.url); + if (url.pathname === \\"/dep\\") { + return new Response(dep_default); + } + if (url.pathname === \\"/text\\") { + return new Response(text); + } + if (url.pathname === \\"/common\\") { + return new Response(common); + } + if (url.pathname === \\"/dynamic\\") { + return new Response((await import(\\"./dynamic.js\\")).default); + } + if (url.pathname.startsWith(\\"/lang/\\")) { + const language = \\"./lang/\\" + url.pathname.substring(\\"/lang/\\".length) + \\".js\\"; + return new Response((await import(language)).default.hello); + } + return new Response(\\"Not Found\\", { status: 404 }); + } + }; + export { + src_default as default + }; + //# sourceMappingURL=index.js.map + " + `); + + // Check additional modules included in output + expect(existsSync(path.join(outDir, "text.txt"))).toBe(true); + expect(existsSync(path.join(outDir, "dynamic.js"))).toBe(true); + expect(existsSync(path.join(outDir, "lang", "en.js"))).toBe(true); + expect(existsSync(path.join(outDir, "lang", "fr.js"))).toBe(true); + }); + + test("fails with service worker entrypoint", async () => { + // Write basic service worker with `find_additional_modules` enabled + const serviceWorkerDir = path.join(tmpDir, "service-worker"); + await fs.mkdir(serviceWorkerDir, { recursive: true }); + await fs.writeFile( + path.join(serviceWorkerDir, "index.js"), + "addEventListener('fetch', (e) => e.respondWith(new Response()))" + ); + await fs.writeFile( + path.join(serviceWorkerDir, "wrangler.toml"), + [ + 'name="service-worker-test"', + 'main = "index.js"', + 'compatibility_date = "2023-08-01"', + "find_additional_modules = true", + ].join("\n") + ); + + // Try build, and check fails + const serviceWorkerOutDir = path.join(tmpDir, "service-worker-out"); + const result = await build(serviceWorkerDir, serviceWorkerOutDir); + expect(result.status).toBe(1); + expect(result.stderr.toString()).toContain( + "`find_additional_modules` can only be used with an ES module entrypoint." 
+ ); + }); +}); diff --git a/fixtures/additional-modules/test/tsconfig.json b/fixtures/additional-modules/test/tsconfig.json new file mode 100644 index 000000000000..d2ce7f144694 --- /dev/null +++ b/fixtures/additional-modules/test/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "@cloudflare/workers-tsconfig/tsconfig.json", + "compilerOptions": { + "types": ["node"] + }, + "include": ["**/*.ts", "../../../node-types.d.ts"] +} diff --git a/fixtures/additional-modules/tsconfig.json b/fixtures/additional-modules/tsconfig.json new file mode 100644 index 000000000000..873892f2c579 --- /dev/null +++ b/fixtures/additional-modules/tsconfig.json @@ -0,0 +1,14 @@ +{ + "compilerOptions": { + "module": "esnext", + "target": "esnext", + "lib": ["esnext"], + "strict": true, + "isolatedModules": true, + "noEmit": true, + "types": ["@cloudflare/workers-types/experimental"], + "allowJs": true, + "allowSyntheticDefaultImports": true + }, + "include": ["src"] +} diff --git a/fixtures/additional-modules/wrangler.toml b/fixtures/additional-modules/wrangler.toml new file mode 100644 index 000000000000..602f4ea5ff08 --- /dev/null +++ b/fixtures/additional-modules/wrangler.toml @@ -0,0 +1,9 @@ +name = "additional-modules" +main = "src/index.ts" +compatibility_date = "2023-08-01" + +find_additional_modules = true +rules = [ + { type = "CommonJS", globs = ["**/*.cjs"]}, + { type = "ESModule", globs = ["**/*.js"]}, +] diff --git a/fixtures/no-bundle-import/src/index.test.ts b/fixtures/no-bundle-import/src/index.test.ts index 492efe0c0a10..4bcc1ff97c51 100644 --- a/fixtures/no-bundle-import/src/index.test.ts +++ b/fixtures/no-bundle-import/src/index.test.ts @@ -7,9 +7,7 @@ describe("Worker", () => { let worker: UnstableDevWorker; beforeAll(async () => { - worker = await unstable_dev(path.resolve(__dirname, "index.js"), { - bundle: false, - }); + worker = await unstable_dev(path.resolve(__dirname, "index.js")); }, 30_000); afterAll(() => worker.stop()); @@ -73,4 +71,10 @@ describe("Worker", () => { expected.set([0, 1, 2, 10]); expect(new Uint8Array(bin)).toEqual(expected); }); + + test("actual dynamic import (that cannot be inlined by an esbuild run)", async () => { + const resp = await worker.fetch("/lang/fr.json"); + const text = await resp.text(); + expect(text).toMatchInlineSnapshot('"Bonjour"'); + }); }); diff --git a/fixtures/no-bundle-import/wrangler.toml b/fixtures/no-bundle-import/wrangler.toml index 7a735e091af5..15de3088ea9e 100644 --- a/fixtures/no-bundle-import/wrangler.toml +++ b/fixtures/no-bundle-import/wrangler.toml @@ -1,6 +1,7 @@ name = "no-bundle-import" main = "src/index.js" compatibility_date = "2023-02-20" +no_bundle = true rules = [ { type = "CommonJS", globs = ["nested/say-hello.js", "**/*.cjs"]}, diff --git a/fixtures/shared/package.json b/fixtures/shared/package.json index 2d9ac8d64cf8..51a6e9b688fb 100644 --- a/fixtures/shared/package.json +++ b/fixtures/shared/package.json @@ -2,5 +2,8 @@ "name": "fixtures-shared", "version": "0.0.0", "private": true, - "description": "Shared fixtures for testing" + "description": "Shared fixtures for testing", + "devDependencies": { + "wrangler": "workspace:*" + } } diff --git a/fixtures/shared/src/run-wrangler-long-lived.ts b/fixtures/shared/src/run-wrangler-long-lived.ts index 70487f9f5e75..77e33e4294f6 100644 --- a/fixtures/shared/src/run-wrangler-long-lived.ts +++ b/fixtures/shared/src/run-wrangler-long-lived.ts @@ -1,4 +1,10 @@ import { fork } from "node:child_process"; +import path from "node:path"; + +export const wranglerEntryPath = path.resolve( + 
__dirname, + "../../../packages/wrangler/bin/wrangler.js" +); /** * Runs the command `wrangler pages dev` in a child process. @@ -38,14 +44,10 @@ async function runLongLivedWrangler(command: string[], cwd: string) { rejectReadyPromise = reject; }); - const wranglerProcess = fork( - "../../packages/wrangler/bin/wrangler.js", - command, - { - stdio: [/*stdin*/ "ignore", /*stdout*/ "pipe", /*stderr*/ "pipe", "ipc"], - cwd, - } - ).on("message", (message) => { + const wranglerProcess = fork(wranglerEntryPath, command, { + stdio: [/*stdin*/ "ignore", /*stdout*/ "pipe", /*stderr*/ "pipe", "ipc"], + cwd, + }).on("message", (message) => { if (settledReadyPromise) return; settledReadyPromise = true; clearTimeout(timeoutHandle); diff --git a/packages/workers-playground/src/QuickEditor/module-collection.ts b/packages/workers-playground/src/QuickEditor/module-collection.ts index 46320970eb1a..6b505a789707 100644 --- a/packages/workers-playground/src/QuickEditor/module-collection.ts +++ b/packages/workers-playground/src/QuickEditor/module-collection.ts @@ -106,7 +106,7 @@ export function parseRules(userRules: Rule[] = []) { completedRuleLocations[rule.type] }, ${JSON.stringify( rules[completedRuleLocations[rule.type]] - )}). This rule will be ignored. To the previous rule, add \`fallthrough = true\` to allow this one to also be used, or \`fallthrough = false\` to silence this warning.` + )}). This rule will be ignored. To use the previous rule, add \`fallthrough = true\` to allow this one to also be used, or \`fallthrough = false\` to silence this warning.` ); } else { console.warn( @@ -116,7 +116,7 @@ export function parseRules(userRules: Rule[] = []) { completedRuleLocations[rule.type] }, ${JSON.stringify( rules[completedRuleLocations[rule.type]] - )}). This rule will be ignored. To the previous rule, add \`fallthrough = true\` to allow the default one to also be used, or \`fallthrough = false\` to silence this warning.` + )}). This rule will be ignored. 
To use the previous rule, add \`fallthrough = true\` to allow the default one to also be used, or \`fallthrough = false\` to silence this warning.` ); } } diff --git a/packages/wrangler/src/__tests__/d1/timeTravel.test.ts b/packages/wrangler/src/__tests__/d1/timeTravel.test.ts index 789e9e9c19d0..1d1e0311e92f 100644 --- a/packages/wrangler/src/__tests__/d1/timeTravel.test.ts +++ b/packages/wrangler/src/__tests__/d1/timeTravel.test.ts @@ -1,8 +1,10 @@ +import { mockConsoleMethods } from "../helpers/mock-console"; import { useMockIsTTY } from "../helpers/mock-istty"; import { runWrangler } from "../helpers/run-wrangler"; import writeWranglerToml from "../helpers/write-wrangler-toml"; describe("time-travel", () => { + mockConsoleMethods(); const { setIsTTY } = useMockIsTTY(); describe("restore", () => { diff --git a/packages/wrangler/src/__tests__/deploy.test.ts b/packages/wrangler/src/__tests__/deploy.test.ts index bdaaa89cda24..e85c5c2c1bed 100644 --- a/packages/wrangler/src/__tests__/deploy.test.ts +++ b/packages/wrangler/src/__tests__/deploy.test.ts @@ -2211,8 +2211,10 @@ addEventListener('fetch', event => {});` it("when using a module worker type, it should add an asset manifest module, and bind to a namespace", async () => { const assets = [ - { filePath: "file-1.txt", content: "Content of file-1" }, - { filePath: "file-2.txt", content: "Content of file-2" }, + // Using `.text` extension instead of `.txt` means files won't be + // treated as additional modules + { filePath: "file-1.text", content: "Content of file-1" }, + { filePath: "file-2.text", content: "Content of file-2" }, ]; const kvNamespace = { title: "__test-name-workers_sites_assets", @@ -2223,8 +2225,27 @@ addEventListener('fetch', event => {});` site: { bucket: "assets", }, + find_additional_modules: true, + rules: [{ type: "ESModule", globs: ["**/*.mjs"] }], }); writeWorkerSource({ type: "esm" }); + fs.mkdirSync("a/b/c", { recursive: true }); + fs.writeFileSync( + "a/1.mjs", + 'export { default } from "__STATIC_CONTENT_MANIFEST";' + ); + fs.writeFileSync( + "a/b/2.mjs", + 'export { default } from "__STATIC_CONTENT_MANIFEST";' + ); + fs.writeFileSync( + "a/b/3.mjs", + 'export { default } from "__STATIC_CONTENT_MANIFEST";' + ); + fs.writeFileSync( + "a/b/c/4.mjs", + 'export { default } from "__STATIC_CONTENT_MANIFEST";' + ); writeAssets(assets); mockUploadWorkerRequest({ expectedBindings: [ @@ -2236,7 +2257,13 @@ addEventListener('fetch', event => {});` ], expectedModules: { __STATIC_CONTENT_MANIFEST: - '{"file-1.txt":"file-1.2ca234f380.txt","file-2.txt":"file-2.5938485188.txt"}', + '{"file-1.text":"file-1.2ca234f380.text","file-2.text":"file-2.5938485188.text"}', + "a/__STATIC_CONTENT_MANIFEST": + 'export { default } from "../__STATIC_CONTENT_MANIFEST";', + "a/b/__STATIC_CONTENT_MANIFEST": + 'export { default } from "../../__STATIC_CONTENT_MANIFEST";', + "a/b/c/__STATIC_CONTENT_MANIFEST": + 'export { default } from "../../../__STATIC_CONTENT_MANIFEST";', }, }); mockSubDomainRequest(); @@ -2247,10 +2274,15 @@ addEventListener('fetch', event => {});` await runWrangler("deploy"); expect(std.info).toMatchInlineSnapshot(` - "Fetching list of already uploaded assets... + "Attaching additional modules: + - a/1.mjs (esm) + - a/b/2.mjs (esm) + - a/b/3.mjs (esm) + - a/b/c/4.mjs (esm) + Fetching list of already uploaded assets... Building list of assets to upload... 
- + file-1.2ca234f380.txt (uploading new version of file-1.txt) - + file-2.5938485188.txt (uploading new version of file-2.txt) + + file-1.2ca234f380.text (uploading new version of file-1.text) + + file-2.5938485188.text (uploading new version of file-2.text) Uploading 2 new assets... Uploaded 100% [2 out of 2]" `); @@ -7115,10 +7147,10 @@ addEventListener('fetch', event => {});` ); // and the warnings because fallthrough was not explicitly set expect(std.warn).toMatchInlineSnapshot(` - "▲ [WARNING] The module rule at position 1 ({\\"type\\":\\"Text\\",\\"globs\\":[\\"**/*.other\\"]}) has the same type as a previous rule (at position 0, {\\"type\\":\\"Text\\",\\"globs\\":[\\"**/*.file\\"]}). This rule will be ignored. To the previous rule, add \`fallthrough = true\` to allow this one to also be used, or \`fallthrough = false\` to silence this warning. + "▲ [WARNING] The module rule at position 1 ({\\"type\\":\\"Text\\",\\"globs\\":[\\"**/*.other\\"]}) has the same type as a previous rule (at position 0, {\\"type\\":\\"Text\\",\\"globs\\":[\\"**/*.file\\"]}). This rule will be ignored. To use the previous rule, add \`fallthrough = true\` to allow this one to also be used, or \`fallthrough = false\` to silence this warning. - ▲ [WARNING] The default module rule {\\"type\\":\\"Text\\",\\"globs\\":[\\"**/*.txt\\",\\"**/*.html\\"]} has the same type as a previous rule (at position 0, {\\"type\\":\\"Text\\",\\"globs\\":[\\"**/*.file\\"]}). This rule will be ignored. To the previous rule, add \`fallthrough = true\` to allow the default one to also be used, or \`fallthrough = false\` to silence this warning. + ▲ [WARNING] The default module rule {\\"type\\":\\"Text\\",\\"globs\\":[\\"**/*.txt\\",\\"**/*.html\\"]} has the same type as a previous rule (at position 0, {\\"type\\":\\"Text\\",\\"globs\\":[\\"**/*.file\\"]}). This rule will be ignored. To use the previous rule, add \`fallthrough = true\` to allow the default one to also be used, or \`fallthrough = false\` to silence this warning. 
" `); diff --git a/packages/wrangler/src/__tests__/traverse-module-graph.test.ts b/packages/wrangler/src/__tests__/find-additional-modules.test.ts similarity index 73% rename from packages/wrangler/src/__tests__/traverse-module-graph.test.ts rename to packages/wrangler/src/__tests__/find-additional-modules.test.ts index e028ade0be1f..803f7146f79e 100644 --- a/packages/wrangler/src/__tests__/traverse-module-graph.test.ts +++ b/packages/wrangler/src/__tests__/find-additional-modules.test.ts @@ -1,7 +1,7 @@ import { mkdir, writeFile } from "fs/promises"; import path from "path"; import dedent from "ts-dedent"; -import traverseModuleGraph from "../deployment-bundle/traverse-module-graph"; +import { findAdditionalModules } from "../deployment-bundle/find-additional-modules"; import { mockConsoleMethods } from "./helpers/mock-console"; import { runInTempDir } from "./helpers/run-in-tmp"; import type { ConfigModuleRuleType } from "../config"; @@ -36,7 +36,7 @@ describe("traverse module graph", () => { ` ); - const bundle = await traverseModuleGraph( + const modules = await findAdditionalModules( { file: path.join(process.cwd(), "./index.js"), directory: process.cwd(), @@ -46,7 +46,7 @@ describe("traverse module graph", () => { [] ); - expect(bundle.modules).toStrictEqual([]); + expect(modules).toStrictEqual([]); }); it.each([ @@ -71,7 +71,7 @@ describe("traverse module graph", () => { ` ); - const bundle = await traverseModuleGraph( + const modules = await findAdditionalModules( { file: path.join(process.cwd(), "./index.js"), directory: process.cwd(), @@ -81,7 +81,7 @@ describe("traverse module graph", () => { [{ type: type as ConfigModuleRuleType, globs: ["**/*.js"] }] ); - expect(bundle.modules[0].type).toStrictEqual(format); + expect(modules[0].type).toStrictEqual(format); }); it("should not resolve JS outside the module root", async () => { @@ -104,7 +104,7 @@ describe("traverse module graph", () => { ` ); - const bundle = await traverseModuleGraph( + const modules = await findAdditionalModules( { file: path.join(process.cwd(), "./src/nested/index.js"), directory: path.join(process.cwd(), "./src/nested"), @@ -115,7 +115,7 @@ describe("traverse module graph", () => { [{ type: "ESModule", globs: ["**/*.js"] }] ); - expect(bundle.modules).toStrictEqual([]); + expect(modules).toStrictEqual([]); }); it("should resolve JS with module root", async () => { @@ -138,7 +138,7 @@ describe("traverse module graph", () => { ` ); - const bundle = await traverseModuleGraph( + const modules = await findAdditionalModules( { file: path.join(process.cwd(), "./src/nested/index.js"), directory: path.join(process.cwd(), "./src/nested"), @@ -149,7 +149,7 @@ describe("traverse module graph", () => { [{ type: "ESModule", globs: ["**/*.js"] }] ); - expect(bundle.modules[0].name).toStrictEqual("other.js"); + expect(modules[0].name).toStrictEqual("other.js"); }); it("should ignore files not matched by glob", async () => { @@ -172,7 +172,7 @@ describe("traverse module graph", () => { ` ); - const bundle = await traverseModuleGraph( + const modules = await findAdditionalModules( { file: path.join(process.cwd(), "./src/nested/index.js"), directory: path.join(process.cwd(), "./src/nested"), @@ -183,7 +183,7 @@ describe("traverse module graph", () => { [{ type: "ESModule", globs: ["**/*.mjs"] }] ); - expect(bundle.modules.length).toStrictEqual(0); + expect(modules.length).toStrictEqual(0); }); it("should resolve files that match the default rules", async () => { @@ -206,7 +206,7 @@ describe("traverse module graph", () => { ` 
); - const bundle = await traverseModuleGraph( + const modules = await findAdditionalModules( { file: path.join(process.cwd(), "./src/index.js"), directory: path.join(process.cwd(), "./src"), @@ -217,6 +217,44 @@ describe("traverse module graph", () => { [] ); - expect(bundle.modules[0].name).toStrictEqual("other.txt"); + expect(modules[0].name).toStrictEqual("other.txt"); + }); + + it("should error if a rule is ignored because the previous was not marked 'fall-through'", async () => { + await mkdir("./src", { recursive: true }); + await writeFile( + "./src/index.js", + dedent/* javascript */ ` + export default { + async fetch(request) { + return new Response(HELLO) + } + } + ` + ); + await writeFile( + "./src/other.txt", + dedent/* javascript */ ` + export const HELLO = "WORLD" + ` + ); + + await expect( + findAdditionalModules( + { + file: path.join(process.cwd(), "./src/index.js"), + directory: path.join(process.cwd(), "./src"), + format: "modules", + // The default module root is dirname(file) + moduleRoot: path.join(process.cwd(), "./src"), + }, + [ + { type: "Text", globs: ["**/*.txt"] }, + { type: "Text", globs: ["other.txt"] }, + ] + ) + ).rejects.toMatchInlineSnapshot( + `[Error: The file other.txt matched a module rule in your configuration ({"type":"Text","globs":["other.txt"]}), but was ignored because a previous rule with the same type was not marked as \`fallthrough = true\`.]` + ); }); }); diff --git a/packages/wrangler/src/api/pages/deploy.tsx b/packages/wrangler/src/api/pages/deploy.tsx index 7d27ef9d8b69..b7d691290661 100644 --- a/packages/wrangler/src/api/pages/deploy.tsx +++ b/packages/wrangler/src/api/pages/deploy.tsx @@ -277,7 +277,6 @@ export async function deploy({ stop: undefined, resolvedEntryPointPath: _workerPath, bundleType: "esm", - moduleCollector: undefined, }; } } diff --git a/packages/wrangler/src/config/environment.ts b/packages/wrangler/src/config/environment.ts index b88e87f7d8bb..6898cfdfd4af 100644 --- a/packages/wrangler/src/config/environment.ts +++ b/packages/wrangler/src/config/environment.ts @@ -77,8 +77,19 @@ interface EnvironmentInheritable { main: string | undefined; /** - * The directory in which module rules should be evaluated in a `--no-bundle` worker - * This defaults to dirname(main) when left undefined + * If true then Wrangler will traverse the file tree below `base_dir`; + * Any files that match `rules` will be included in the deployed worker. + * Defaults to true if `no_bundle` is true, otherwise false. + * + * @inheritable + */ + find_additional_modules: boolean | undefined; + + /** + * The directory in which module rules should be evaluated when including additional files into a worker deployment. + * This defaults to the directory containing the `main` entry point of the worker if not specified. 
+ * + * @inheritable */ base_dir: string | undefined; diff --git a/packages/wrangler/src/config/validation.ts b/packages/wrangler/src/config/validation.ts index 0be540bff217..8c0d21394e5c 100644 --- a/packages/wrangler/src/config/validation.ts +++ b/packages/wrangler/src/config/validation.ts @@ -1118,6 +1118,14 @@ function normalizeAndValidateEnvironment( ), deprecatedUpload ), + find_additional_modules: inheritable( + diagnostics, + topLevelEnv, + rawEnv, + "find_additional_modules", + isBoolean, + undefined + ), base_dir: normalizeAndValidateBaseDirField( configPath, inheritable( diff --git a/packages/wrangler/src/deploy/deploy.ts b/packages/wrangler/src/deploy/deploy.ts index 8cb034531a21..0321f2ee433f 100644 --- a/packages/wrangler/src/deploy/deploy.ts +++ b/packages/wrangler/src/deploy/deploy.ts @@ -11,8 +11,16 @@ import { printBundleSize, printOffendingDependencies, } from "../deployment-bundle/bundle-reporter"; +import { getBundleType } from "../deployment-bundle/bundle-type"; import { createWorkerUploadForm } from "../deployment-bundle/create-worker-upload-form"; -import traverseModuleGraph from "../deployment-bundle/traverse-module-graph"; +import { + findAdditionalModules, + writeAdditionalModules, +} from "../deployment-bundle/find-additional-modules"; +import { + createModuleCollector, + getWrangler1xLegacyModuleReferences, +} from "../deployment-bundle/module-collection"; import { addHyphens } from "../deployments"; import { confirm } from "../dialogs"; import { getMigrationsToUpload } from "../durable"; @@ -30,6 +38,7 @@ import type { ZoneIdRoute, ZoneNameRoute, CustomDomainRoute, + Rule, } from "../config/environment"; import type { Entry } from "../deployment-bundle/entry"; import type { CfWorkerInit, CfPlacement } from "../deployment-bundle/worker"; @@ -452,45 +461,54 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m ); } - const { - modules, - dependencies, - resolvedEntryPointPath, - bundleType, - }: Awaited> = props.noBundle - ? await traverseModuleGraph(props.entry, props.rules) - : await bundleWorker( - props.entry, - typeof destination === "string" ? destination : destination.path, - { - serveAssetsFromWorker: - !props.isWorkersSite && Boolean(props.assetPaths), - doBindings: config.durable_objects.bindings, - jsxFactory, - jsxFragment, - rules: props.rules, - tsconfig: props.tsconfig ?? config.tsconfig, - minify, - legacyNodeCompat, - nodejsCompat, - define: { ...config.define, ...props.defines }, - checkFetch: false, - assets: config.assets && { - ...config.assets, + const entryDirectory = path.dirname(props.entry.file); + const moduleCollector = createModuleCollector({ + wrangler1xLegacyModuleReferences: getWrangler1xLegacyModuleReferences( + entryDirectory, + props.entry.file + ), + entry: props.entry, + // `moduleCollector` doesn't get used when `props.noBundle` is set, so + // `findAdditionalModules` always defaults to `false` + findAdditionalModules: config.find_additional_modules ?? false, + rules: props.rules, + }); + + const { modules, dependencies, resolvedEntryPointPath, bundleType } = + props.noBundle + ? await noBundleWorker(props.entry, props.rules, props.outDir) + : await bundleWorker( + props.entry, + typeof destination === "string" ? destination : destination.path, + { + bundle: true, + additionalModules: [], + moduleCollector, + serveAssetsFromWorker: + !props.isWorkersSite && Boolean(props.assetPaths), + doBindings: config.durable_objects.bindings, + jsxFactory, + jsxFragment, + tsconfig: props.tsconfig ?? 
config.tsconfig, + minify, + legacyNodeCompat, + nodejsCompat, + define: { ...config.define, ...props.defines }, + checkFetch: false, + assets: config.assets, // enable the cache when publishing - bypassCache: false, - }, - services: config.services, - // We don't set workerDefinitions here, - // because we don't want to apply the dev-time - // facades on top of it - workerDefinitions: undefined, - // We want to know if the build is for development or publishing - // This could potentially cause issues as we no longer have identical behaviour between dev and deploy? - targetConsumer: "deploy", - local: false, - } - ); + bypassAssetCache: false, + services: config.services, + // We don't set workerDefinitions here, + // because we don't want to apply the dev-time + // facades on top of it + workerDefinitions: undefined, + // We want to know if the build is for development or publishing + // This could potentially cause issues as we no longer have identical behaviour between dev and deploy? + targetConsumer: "deploy", + local: false, + } + ); const content = readFileSync(resolvedEntryPointPath, { encoding: "utf-8", @@ -1062,3 +1080,21 @@ function updateQueueConsumers(config: Config): Promise[] { ); }); } + +async function noBundleWorker( + entry: Entry, + rules: Rule[], + outDir: string | undefined +) { + const modules = await findAdditionalModules(entry, rules); + if (outDir) { + await writeAdditionalModules(modules, outDir); + } + + return { + modules, + dependencies: {}, + resolvedEntryPointPath: entry.file, + bundleType: getBundleType(entry.format), + }; +} diff --git a/packages/wrangler/src/deployment-bundle/apply-middleware.ts b/packages/wrangler/src/deployment-bundle/apply-middleware.ts new file mode 100644 index 000000000000..3159272b2e09 --- /dev/null +++ b/packages/wrangler/src/deployment-bundle/apply-middleware.ts @@ -0,0 +1,164 @@ +import * as fs from "node:fs"; +import * as path from "node:path"; +import { getBasePath } from "../paths"; +import { dedent } from "../utils/dedent"; +import type { DurableObjectBindings } from "../config/environment"; +import type { Entry } from "./entry"; + +/** + * A facade that acts as a "middleware loader". + * Instead of needing to apply a facade for each individual middleware, this allows + * middleware to be written in a more traditional manner and then be applied all + * at once, requiring just two esbuild steps, rather than 1 per middleware. + */ +export interface MiddlewareLoader { + name: string; + path: string; + // This will be provided as a virtual module at `config:middleware/${name}`, + // where `name` is the name of this middleware, and the module contains + // named exports for each property on the `config` record. + config?: Record; +} + +export async function applyMiddlewareLoaderFacade( + entry: Entry, + tmpDirPath: string, + middleware: MiddlewareLoader[], + doBindings: DurableObjectBindings +): Promise<{ entry: Entry; inject?: string[] }> { + // Firstly we need to insert the middleware array into the project, + // and then we load the middleware - this insertion and loading is + // different for each format. + // Make sure we resolve all files relative to the actual temporary directory, + // otherwise we'll have issues with source maps + tmpDirPath = fs.realpathSync(tmpDirPath); + + // We need to import each of the middlewares, so we need to generate a + // random, unique identifier that we can use for the import. + // Middlewares are required to be default exports so we can import to any name. 
+ const middlewareIdentifiers = middleware.map((m, index) => [ + `__MIDDLEWARE_${index}__`, + path.resolve(getBasePath(), m.path), + ]); + + const dynamicFacadePath = path.join( + tmpDirPath, + "middleware-insertion-facade.js" + ); + const imports = middlewareIdentifiers + .map( + ([id, middlewarePath]) => + /*javascript*/ `import * as ${id} from "${prepareFilePath( + middlewarePath + )}";` + ) + .join("\n"); + + const middlewareFns = middlewareIdentifiers.map(([m]) => `${m}.default`); + + if (entry.format === "modules") { + const middlewareWrappers = middlewareIdentifiers + .map(([m]) => `${m}.wrap`) + .join(","); + + const durableObjects = doBindings + // Don't shim anything not local to this worker + .filter((b) => !b.script_name) + // Reexport the DO classnames + .map( + (b) => + /*javascript*/ `export const ${b.class_name} = maskDurableObjectDefinition(OTHER_EXPORTS.${b.class_name});` + ) + .join("\n"); + await fs.promises.writeFile( + dynamicFacadePath, + dedent/*javascript*/ ` + import worker, * as OTHER_EXPORTS from "${prepareFilePath(entry.file)}"; + ${imports} + const envWrappers = [${middlewareWrappers}].filter(Boolean); + const facade = { + ...worker, + envWrappers, + middleware: [ + ${middlewareFns.join(",")}, + ...(worker.middleware ? worker.middleware : []), + ].filter(Boolean) + } + export * from "${prepareFilePath(entry.file)}"; + + const maskDurableObjectDefinition = (cls) => + class extends cls { + constructor(state, env) { + let wrappedEnv = env + for (const wrapFn of envWrappers) { + wrappedEnv = wrapFn(wrappedEnv) + } + super(state, wrappedEnv); + } + }; + ${durableObjects} + + export default facade; + ` + ); + + const targetPathLoader = path.join( + tmpDirPath, + "middleware-loader.entry.ts" + ); + const loaderPath = path.resolve( + getBasePath(), + "templates/middleware/loader-modules.ts" + ); + + const baseLoader = await fs.promises.readFile(loaderPath, "utf-8"); + const transformedLoader = baseLoader + .replaceAll("__ENTRY_POINT__", prepareFilePath(dynamicFacadePath)) + .replace( + "./common", + prepareFilePath( + path.resolve(getBasePath(), "templates/middleware/common.ts") + ) + ); + + await fs.promises.writeFile(targetPathLoader, transformedLoader); + + return { + entry: { + ...entry, + file: targetPathLoader, + }, + }; + } else { + const loaderSwPath = path.resolve( + getBasePath(), + "templates/middleware/loader-sw.ts" + ); + + await fs.promises.writeFile( + dynamicFacadePath, + dedent/*javascript*/ ` + import { __facade_registerInternal__ } from "${prepareFilePath(loaderSwPath)}"; + ${imports} + __facade_registerInternal__([${middlewareFns}]) + ` + ); + + return { + entry, + inject: [dynamicFacadePath], + }; + } +} + +/** + * Process the given file path to ensure it will work on all OSes. + * + * Windows paths contain backslashes, which are taken to be escape characters + * when inserted directly into source code. + * This function will escape these backslashes to make sure they work in all OSes. 
+ * + */ +function prepareFilePath(filePath: string): string { + return JSON.stringify(filePath).slice(1, -1); +} diff --git a/packages/wrangler/src/deployment-bundle/build-failures.ts b/packages/wrangler/src/deployment-bundle/build-failures.ts new file mode 100644 index 000000000000..f2da1c50bf1b --- /dev/null +++ b/packages/wrangler/src/deployment-bundle/build-failures.ts @@ -0,0 +1,54 @@ +import { builtinModules } from "node:module"; +import type * as esbuild from "esbuild"; + +/** + * RegExp matching against esbuild's error text when it is unable to resolve + * a Node built-in module. If we detect this when node_compat is disabled, + * we'll rewrite the error to suggest enabling it. + */ +const nodeBuiltinResolveErrorText = new RegExp( + '^Could not resolve "(' + + builtinModules.join("|") + + "|" + + builtinModules.map((module) => "node:" + module).join("|") + + ')"$' +); +/** + * Rewrites esbuild BuildFailures for failing to resolve Node built-in modules + * to suggest enabling Node compat as opposed to `platform: "node"`. + */ +export function rewriteNodeCompatBuildFailure( + errors: esbuild.Message[], + forPages = false +) { + for (const error of errors) { + const match = nodeBuiltinResolveErrorText.exec(error.text); + if (match !== null) { + const issue = `The package "${match[1]}" wasn't found on the file system but is built into node.`; + + const instructionForUser = `${ + forPages + ? 'Add the "nodejs_compat" compatibility flag to your Pages project' + : 'Add "node_compat = true" to your wrangler.toml file' + } to enable Node.js compatibility.`; + + error.notes = [ + { + location: null, + text: `${issue}\n${instructionForUser}`, + }, + ]; + } + } +} +/** + * Returns true if the passed value looks like an esbuild BuildFailure object + */ +export function isBuildFailure(err: unknown): err is esbuild.BuildFailure { + return ( + typeof err === "object" && + err !== null && + "errors" in err && + "warnings" in err + ); +} diff --git a/packages/wrangler/src/deployment-bundle/bundle-type.ts b/packages/wrangler/src/deployment-bundle/bundle-type.ts new file mode 100644 index 000000000000..c261debc017c --- /dev/null +++ b/packages/wrangler/src/deployment-bundle/bundle-type.ts @@ -0,0 +1,8 @@ +import type { CfModuleType, CfScriptFormat } from "./worker"; + +/** + * Compute the entry-point type from the bundle format. + */ +export function getBundleType(format: CfScriptFormat): CfModuleType { + return format === "modules" ? 
"esm" : "commonjs"; +} diff --git a/packages/wrangler/src/deployment-bundle/bundle.ts b/packages/wrangler/src/deployment-bundle/bundle.ts index 9cd7edadc91d..71713a1462ed 100644 --- a/packages/wrangler/src/deployment-bundle/bundle.ts +++ b/packages/wrangler/src/deployment-bundle/bundle.ts @@ -1,23 +1,28 @@ import * as fs from "node:fs"; -import { builtinModules } from "node:module"; import * as path from "node:path"; import NodeGlobalsPolyfills from "@esbuild-plugins/node-globals-polyfill"; import NodeModulesPolyfills from "@esbuild-plugins/node-modules-polyfill"; import * as esbuild from "esbuild"; import tmp from "tmp-promise"; -import createModuleCollector from "../module-collection"; import { getBasePath } from "../paths"; -import { dedent } from "../utils/dedent"; +import { applyMiddlewareLoaderFacade } from "./apply-middleware"; +import { + isBuildFailure, + rewriteNodeCompatBuildFailure, +} from "./build-failures"; +import { dedupeModulesByName } from "./dedupe-modules"; import { getEntryPointFromMetafile } from "./entry-point-from-metafile"; import { cloudflareInternalPlugin } from "./esbuild-plugins/cloudflare-internal"; import { configProviderPlugin } from "./esbuild-plugins/config-provider"; import { nodejsCompatPlugin } from "./esbuild-plugins/nodejs-compat"; +import { writeAdditionalModules } from "./find-additional-modules"; +import { noopModuleCollector } from "./module-collection"; import type { Config } from "../config"; import type { DurableObjectBindings } from "../config/environment"; import type { WorkerRegistry } from "../dev-registry"; -import type { SourceMapMetadata } from "../inspect"; -import type { ModuleCollector } from "../module-collection"; +import type { MiddlewareLoader } from "./apply-middleware"; import type { Entry } from "./entry"; +import type { ModuleCollector } from "./module-collection"; import type { CfModule } from "./worker"; export const COMMON_ESBUILD_OPTIONS = { @@ -26,6 +31,15 @@ export const COMMON_ESBUILD_OPTIONS = { loader: { ".js": "jsx", ".mjs": "jsx", ".cjs": "jsx" }, } as const; +/** + * Information about Wrangler's bundling process that needs passed through + * for DevTools sourcemap transformation + */ +export interface SourceMapMetadata { + tmpDir: string; + entryDirectory: string; +} + export type BundleResult = { modules: CfModule[]; dependencies: esbuild.Metafile["outputs"][string]["inputs"]; @@ -34,81 +48,41 @@ export type BundleResult = { stop: (() => Promise) | undefined; sourceMapPath?: string | undefined; sourceMapMetadata?: SourceMapMetadata | undefined; - moduleCollector: ModuleCollector | undefined; }; -export type StaticAssetsConfig = - | (Config["assets"] & { - bypassCache: boolean | undefined; - }) - | undefined; - -/** - * When applying the middleware facade for service workers, we need to inject - * some code at the top of the final output bundle. Applying an inject too early - * will allow esbuild to reorder the code. Additionally, we need to make sure - * user code is bundled in the final esbuild step with `watch` correctly - * configured, so code changes are detected. - * - * This type is used as the return type for the `MiddlewareFn` type representing - * a facade-applying function. Returned injects should be injected with the - * final esbuild step. - */ -type EntryWithInject = Entry & { inject?: string[] }; - -/** - * RegExp matching against esbuild's error text when it is unable to resolve - * a Node built-in module. 
If we detect this when node_compat is disabled, - we'll rewrite the error to suggest enabling it. - */ -const nodeBuiltinResolveErrorText = new RegExp( - '^Could not resolve "(' + - builtinModules.join("|") + - "|" + - builtinModules.map((module) => "node:" + module).join("|") + - ')"$' -); - -/** - * Returns true if the passed value looks like an esbuild BuildFailure object - */ -export function isBuildFailure(err: unknown): err is esbuild.BuildFailure { - return ( - typeof err === "object" && - err !== null && - "errors" in err && - "warnings" in err - ); -} - -/** - * Rewrites esbuild BuildFailures for failing to resolve Node built-in modules - * to suggest enabling Node compat as opposed to `platform: "node"`. - */ -export function rewriteNodeCompatBuildFailure( - errors: esbuild.Message[], - forPages = false -) { - for (const error of errors) { - const match = nodeBuiltinResolveErrorText.exec(error.text); - if (match !== null) { - const issue = `The package "${match[1]}" wasn't found on the file system but is built into node.`; - - const instructionForUser = `${ - forPages - ? 'Add the "nodejs_compat" compatibility flag to your Pages project' - : 'Add "node_compat = true" to your wrangler.toml file' - } to enable Node.js compatibility.`; - - error.notes = [ - { - location: null, - text: `${issue}\n${instructionForUser}`, - }, - ]; - } - } -} +export type BundleOptions = { + // When `bundle` is set to false, we apply shims to the Worker, but won't pull in any imports + bundle: boolean; + // Known additional modules provided by the outside. + additionalModules: CfModule[]; + // A module collector enables you to observe what modules are in the Worker. + moduleCollector: ModuleCollector; + serveAssetsFromWorker: boolean; + assets?: Config["assets"]; + bypassAssetCache?: boolean; + doBindings: DurableObjectBindings; + jsxFactory?: string; + jsxFragment?: string; + entryName?: string; + watch?: boolean; + tsconfig?: string; + minify?: boolean; + legacyNodeCompat?: boolean; + nodejsCompat?: boolean; + define: Config["define"]; + checkFetch: boolean; + services?: Config["services"]; + workerDefinitions?: WorkerRegistry; + targetConsumer: "dev" | "deploy"; + testScheduled?: boolean; + inject?: string[]; + loader?: Record<string, string>; + sourcemap?: esbuild.CommonOptions["sourcemap"]; + plugins?: esbuild.Plugin[]; + isOutfile?: boolean; + forPages?: boolean; + local: boolean; +}; /** * Generate a bundle for the worker identified by the arguments passed in.
@@ -116,54 +90,24 @@ export function rewriteNodeCompatBuildFailure( export async function bundleWorker( entry: Entry, destination: string, - options: { - // When `bundle` is set to false, we apply shims to the Worker, but won't pull in any imports - bundle?: boolean; - serveAssetsFromWorker: boolean; - assets?: StaticAssetsConfig; - doBindings: DurableObjectBindings; - jsxFactory?: string; - jsxFragment?: string; - entryName?: string; - rules: Config["rules"]; - watch?: boolean; - tsconfig?: string; - minify?: boolean; - legacyNodeCompat?: boolean; - nodejsCompat?: boolean; - define: Config["define"]; - checkFetch: boolean; - services?: Config["services"]; - workerDefinitions?: WorkerRegistry; - targetConsumer: "dev" | "deploy"; - testScheduled?: boolean; - inject?: string[]; - loader?: Record; - sourcemap?: esbuild.CommonOptions["sourcemap"]; - plugins?: esbuild.Plugin[]; - additionalModules?: CfModule[]; - // TODO: Rip these out https://github.com/cloudflare/workers-sdk/issues/2153 - disableModuleCollection?: boolean; - isOutfile?: boolean; - forPages?: boolean; - local: boolean; - } -): Promise { - const { - bundle = true, + { + bundle, + moduleCollector = noopModuleCollector, + additionalModules = [], serveAssetsFromWorker, doBindings, jsxFactory, jsxFragment, entryName, - rules, watch, tsconfig, minify, legacyNodeCompat, nodejsCompat, + define, checkFetch, assets, + bypassAssetCache, workerDefinitions, services, targetConsumer, @@ -172,13 +116,11 @@ export async function bundleWorker( loader, sourcemap, plugins, - disableModuleCollection, isOutfile, forPages, - additionalModules = [], local, - } = options; - + }: BundleOptions +): Promise { // We create a temporary directory for any one-off files we // need to create. This is separate from the main build // directory (`destination`). @@ -187,59 +129,19 @@ export async function bundleWorker( // without symlinks, otherwise `esbuild` will generate invalid source maps. const tmpDirPath = fs.realpathSync(unsafeTmpDir.path); - const entryDirectory = path.dirname(entry.file); - let moduleCollector = createModuleCollector({ - wrangler1xlegacyModuleReferences: { - rootDirectory: entryDirectory, - fileNames: new Set( - fs - .readdirSync(entryDirectory, { withFileTypes: true }) - .filter( - (dirEntry) => - dirEntry.isFile() && dirEntry.name !== path.basename(entry.file) - ) - .map((dirEnt) => dirEnt.name) - ), - }, - format: entry.format, - rules, - }); - if (disableModuleCollection) { - moduleCollector = { - modules: [], - plugin: { - name: moduleCollector.plugin.name, - setup: () => {}, - }, - }; - } - - // In dev, we want to patch `fetch()` with a special version that looks - // for bad usages and can warn the user about them; so we inject - // `checked-fetch.js` to do so. However, with yarn 3 style pnp, - // we need to extract that file to an accessible place before injecting - // it in, hence this code here. + const entryFile = entry.file; - const checkedFetchFileToInject = path.join(tmpDirPath, "checked-fetch.js"); + // At this point, we take the opportunity to "wrap" the worker with middleware. 
+ const middlewareToLoad: MiddlewareLoader[] = []; - if (checkFetch && !fs.existsSync(checkedFetchFileToInject)) { - fs.mkdirSync(tmpDirPath, { - recursive: true, + if (targetConsumer === "dev" && !!testScheduled) { + middlewareToLoad.push({ + name: "scheduled", + path: "templates/middleware/middleware-scheduled.ts", }); - fs.writeFileSync( - checkedFetchFileToInject, - fs.readFileSync(path.resolve(getBasePath(), "templates/checked-fetch.js")) - ); } - // At this point, we take the opportunity to "wrap" any input workers - // with any extra functionality we may want to add. - const middlewareToLoad: MiddlewareLoader[] = [ - { - name: "scheduled", - path: "templates/middleware/middleware-scheduled.ts", - active: targetConsumer === "dev" && !!testScheduled, - }, + if (targetConsumer === "dev" && local) { // In Miniflare 3, we bind the user's worker as a service binding in a // special entry worker that handles things like injecting `Request.cf`, // live-reload, and the pretty-error page. @@ -255,15 +157,16 @@ export async function bundleWorker( // // This middleware wraps the user's worker in a `try/catch`, and rewrites // errors in this format so a pretty-error page can be shown. - { + middlewareToLoad.push({ name: "miniflare3-json-error", path: "templates/middleware/middleware-miniflare3-json-error.ts", - active: targetConsumer === "dev" && local, - }, - { + }); + } + + if (serveAssetsFromWorker) { + middlewareToLoad.push({ name: "serve-static-assets", path: "templates/middleware/middleware-serve-static-assets.ts", - active: serveAssetsFromWorker, config: { spaMode: typeof assets === "object" ? assets.serve_single_page_app : false, @@ -272,22 +175,25 @@ export async function bundleWorker( ? { browserTTL: assets.browser_TTL || 172800 /* 2 days: 2* 60 * 60 * 24 */, - bypassCache: assets.bypassCache, + bypassCache: bypassAssetCache, } : {}, }, - }, - { + }); + } + + if ( + targetConsumer === "dev" && + !!( + workerDefinitions && + Object.keys(workerDefinitions).length > 0 && + services && + services.length > 0 + ) + ) { + middlewareToLoad.push({ name: "multiworker-dev", path: "templates/middleware/middleware-multiworker-dev.ts", - active: - targetConsumer === "dev" && - !!( - workerDefinitions && - Object.keys(workerDefinitions).length > 0 && - services && - services.length > 0 - ), config: { workers: Object.fromEntries( (services || []).map((serviceBinding) => [ @@ -296,11 +202,11 @@ export async function bundleWorker( ]) ), }, - }, - ]; + }); + } - // If using watch, build result will not be returned - // This plugin will retreive the build result on the first build + // If using watch, build result will not be returned. + // This plugin will retrieve the build result on the first build. let initialBuildResult: (result: esbuild.BuildResult) => void; const initialBuildResultPromise = new Promise( (resolve) => (initialBuildResult = resolve) @@ -313,31 +219,69 @@ export async function bundleWorker( }; const inject: string[] = injectOption ?? []; - if (checkFetch) inject.push(checkedFetchFileToInject); - const activeMiddleware = middlewareToLoad.filter( - // We dynamically filter the middleware depending on where we are bundling for - (m) => m.active - ); - let inputEntry: EntryWithInject = entry; + + if (checkFetch) { + // In dev, we want to patch `fetch()` with a special version that looks + // for bad usages and can warn the user about them; so we inject + // `checked-fetch.js` to do so. 
However, with yarn 3 style pnp, + // we need to extract that file to an accessible place before injecting + // it in, hence this code here. + + const checkedFetchFileToInject = path.join(tmpDirPath, "checked-fetch.js"); + + if (checkFetch && !fs.existsSync(checkedFetchFileToInject)) { + fs.mkdirSync(tmpDirPath, { + recursive: true, + }); + fs.writeFileSync( + checkedFetchFileToInject, + fs.readFileSync( + path.resolve(getBasePath(), "templates/checked-fetch.js") + ) + ); + } + + inject.push(checkedFetchFileToInject); + } + if ( - activeMiddleware.length > 0 || + middlewareToLoad.length > 0 || process.env.EXPERIMENTAL_MIDDLEWARE === "true" ) { - inputEntry = await applyMiddlewareLoaderFacade( + const result = await applyMiddlewareLoaderFacade( entry, tmpDirPath, - activeMiddleware, + middlewareToLoad, doBindings ); - if (inputEntry.inject !== undefined) inject.push(...inputEntry.inject); + entry = result.entry; + + /** + * When applying the middleware facade for service workers, we need to inject + * some code at the top of the final output bundle. Applying an inject too early + * will allow esbuild to reorder the code. Additionally, we need to make sure + * user code is bundled in the final esbuild step with `watch` correctly + * configured, so code changes are detected. + * + * This type is used as the return type for the `MiddlewareFn` type representing + * a facade-applying function. Returned injects should be injected with the + * final esbuild step. + */ + inject.push(...(result.inject ?? [])); } + // `esbuild` doesn't support returning `watch*` options from `onStart()` + // plugin callbacks. Instead, we define an empty virtual module that is + // imported in this injected module. Importing that module registers watchers. + inject.push(path.resolve(getBasePath(), "templates/modules-watch-stub.js")); + const buildOptions: esbuild.BuildOptions & { metafile: true } = { - entryPoints: [inputEntry.file], + // Don't use entryFile here as the file may have been changed when applying the middleware + entryPoints: [entry.file], bundle, absWorkingDir: entry.directory, outdir: destination, - entryNames: entryName || path.parse(entry.file).name, + entryNames: entryName || path.parse(entryFile).name, ...(isOutfile ? { outdir: undefined, @@ -362,7 +306,7 @@ export async function bundleWorker( // when we do a build of wrangler. (re: https://github.com/cloudflare/workers-sdk/issues/1477) "process.env.NODE_ENV": `"${process.env["NODE_ENV" + ""]}"`, ...(legacyNodeCompat ? { global: "globalThis" } : {}), - ...options.define, + ...define, }, }), loader: { @@ -418,7 +362,7 @@ export async function bundleWorker( throw e; } - const entryPoint = getEntryPointFromMetafile(entry.file, result.metafile); + const entryPoint = getEntryPointFromMetafile(entryFile, result.metafile); const bundleType = entryPoint.exports.length > 0 ? 
"esm" : "commonjs"; const sourceMapPath = Object.keys(result.metafile.outputs).filter((_path) => @@ -437,15 +381,7 @@ export async function bundleWorker( ...additionalModules, ]); - // copy all referenced modules into the output bundle directory - for (const module of modules) { - const modulePath = path.join( - path.dirname(resolvedEntryPointPath), - module.name - ); - fs.mkdirSync(path.dirname(modulePath), { recursive: true }); - fs.writeFileSync(modulePath, module.content); - } + await writeAdditionalModules(modules, path.dirname(resolvedEntryPointPath)); return { modules, @@ -458,170 +394,5 @@ export async function bundleWorker( tmpDir: tmpDirPath, entryDirectory: entry.directory, }, - moduleCollector, }; } - -/** - * A facade that acts as a "middleware loader". - * Instead of needing to apply a facade for each individual middleware, this allows - * middleware to be written in a more traditional manner and then be applied all - * at once, requiring just two esbuild steps, rather than 1 per middleware. - */ - -interface MiddlewareLoader { - name: string; - path: string; - active: boolean; - // This will be provided as a virtual module at config:middleware/${name} - config?: Record; -} - -async function applyMiddlewareLoaderFacade( - entry: Entry, - tmpDirPath: string, - middleware: MiddlewareLoader[], // a list of paths to middleware files - doBindings: DurableObjectBindings -): Promise { - // Firstly we need to insert the middleware array into the project, - // and then we load the middleware - this insertion and loading is - // different for each format. - - // We need to import each of the middlewares, so we need to generate a - // random, unique identifier that we can use for the import. - // Middlewares are required to be default exports so we can import to any name. - const middlewareIdentifiers = middleware.map((m, index) => [ - `__MIDDLEWARE_${index}__`, - path.resolve(getBasePath(), m.path), - ]); - - const dynamicFacadePath = path.join( - tmpDirPath, - "middleware-insertion-facade.js" - ); - const imports = middlewareIdentifiers - .map( - ([id, middlewarePath]) => - /*javascript*/ `import * as ${id} from "${prepareFilePath( - middlewarePath - )}";` - ) - .join("\n"); - - const middlewareFns = middlewareIdentifiers.map(([m]) => `${m}.default`); - - if (entry.format === "modules") { - const middlewareWrappers = middlewareIdentifiers - .map(([m]) => `${m}.wrap`) - .join(","); - - const durableObjects = doBindings - // Don't shim anything not local to this worker - .filter((b) => !b.script_name) - // Reexport the DO classnames - .map( - (b) => - /*javascript*/ `export const ${b.class_name} = maskDurableObjectDefinition(OTHER_EXPORTS.${b.class_name});` - ) - .join("\n"); - await fs.promises.writeFile( - dynamicFacadePath, - dedent/*javascript*/ ` - import worker, * as OTHER_EXPORTS from "${prepareFilePath(entry.file)}"; - ${imports} - const envWrappers = [${middlewareWrappers}].filter(Boolean); - const facade = { - ...worker, - envWrappers, - middleware: [ - ${middlewareFns.join(",")}, - ...(worker.middleware ? 
worker.middleware : []), - ].filter(Boolean) - } - export * from "${prepareFilePath(entry.file)}"; - - const maskDurableObjectDefinition = (cls) => - class extends cls { - constructor(state, env) { - let wrappedEnv = env - for (const wrapFn of envWrappers) { - wrappedEnv = wrapFn(wrappedEnv) - } - super(state, wrappedEnv); - } - }; - ${durableObjects} - - export default facade; - ` - ); - - const targetPathLoader = path.join( - tmpDirPath, - "middleware-loader.entry.ts" - ); - const loaderPath = path.resolve( - getBasePath(), - "templates/middleware/loader-modules.ts" - ); - - const baseLoader = await fs.promises.readFile(loaderPath, "utf-8"); - const transformedLoader = baseLoader - .replaceAll("__ENTRY_POINT__", prepareFilePath(dynamicFacadePath)) - .replace( - "./common", - prepareFilePath( - path.resolve(getBasePath(), "templates/middleware/common.ts") - ) - ); - - await fs.promises.writeFile(targetPathLoader, transformedLoader); - - return { - ...entry, - file: targetPathLoader, - }; - } else { - const loaderSwPath = path.resolve( - getBasePath(), - "templates/middleware/loader-sw.ts" - ); - - await fs.promises.writeFile( - dynamicFacadePath, - dedent/*javascript*/ ` - import { __facade_registerInternal__ } from "${prepareFilePath(loaderSwPath)}"; - ${imports} - __facade_registerInternal__([${middlewareFns}]) - ` - ); - - return { - ...entry, - inject: [dynamicFacadePath], - }; - } -} - -/** - * Prefer modules towards the end of the array in the case of a collision by name. - */ -export function dedupeModulesByName(modules: CfModule[]): CfModule[] { - return Object.values( - modules.reduce((moduleMap, module) => { - moduleMap[module.name] = module; - return moduleMap; - }, {} as Record) - ); -} -/** - * Process the given file path to ensure it will work on all OSes. - * - * Windows paths contain backslashes, which are taken to be escape characters - * when inserted directly into source code. - * This function will escape these backslashes to make sure they work in all OSes. - * - */ -function prepareFilePath(filePath: string): string { - return JSON.stringify(filePath).slice(1, -1); -} diff --git a/packages/wrangler/src/deployment-bundle/create-worker-upload-form.ts b/packages/wrangler/src/deployment-bundle/create-worker-upload-form.ts index 2364a10d79a3..63f4f752f989 100644 --- a/packages/wrangler/src/deployment-bundle/create-worker-upload-form.ts +++ b/packages/wrangler/src/deployment-bundle/create-worker-upload-form.ts @@ -1,4 +1,6 @@ +import assert from "node:assert"; import { readFileSync } from "node:fs"; +import path from "node:path"; import { FormData, File } from "undici"; import { handleUnsafeCapnp } from "./capnp"; import type { @@ -103,7 +105,7 @@ export interface WorkerMetadata { logpush?: boolean; placement?: CfPlacement; tail_consumers?: CfTailConsumer[]; - // Allow unsafe.metadata to add arbitary properties at runtime + // Allow unsafe.metadata to add arbitrary properties at runtime [key: string]: unknown; } @@ -331,6 +333,50 @@ export function createWorkerUploadForm(worker: CfWorkerInit): FormData { ); } + const manifestModuleName = "__STATIC_CONTENT_MANIFEST"; + const hasManifest = modules?.some(({ name }) => name === manifestModuleName); + if (hasManifest && main.type === "esm") { + assert(modules !== undefined); + // Each modules-format worker has a virtual file system for module + // resolution. For example, uploading modules with names `1.mjs`, + // `a/2.mjs` and `a/b/3.mjs`, creates virtual directories `a` and `a/b`. 
+ // `1.mjs` is in the virtual root directory. + // + // The above code adds the `__STATIC_CONTENT_MANIFEST` module to the root + // directory. This means `import manifest from "__STATIC_CONTENT_MANIFEST"` + // will only work if the importing module is also in the root. If the + // importing module was `a/b/3.mjs` for example, the import would need to + // be `import manifest from "../../__STATIC_CONTENT_MANIFEST"`. + // + // When Wrangler bundles all user code, this isn't a problem, as code is + // only ever uploaded to the root. However, once `--no-bundle` or + // `find_additional_modules` is enabled, the user controls the directory + // structure. + // + // To fix this, if we've got a modules-format worker, we add stub modules + // in each subdirectory that re-export the manifest module from the root. + // This allows the manifest to be imported as `__STATIC_CONTENT_MANIFEST` + // in every directory, whilst avoiding duplication of the manifest. + + // Collect unique subdirectories + const subDirs = new Set( + modules.map((module) => path.posix.dirname(module.name)) + ); + for (const subDir of subDirs) { + // Ignore `.` as it's not a subdirectory, and we don't want to + // register the manifest module in the root twice. + if (subDir === ".") continue; + const relativePath = path.posix.relative(subDir, manifestModuleName); + const filePath = path.posix.join(subDir, manifestModuleName); + modules.push({ + name: filePath, + filePath, + content: `export { default } from ${JSON.stringify(relativePath)};`, + type: "esm", + }); + } + } + if (main.type === "commonjs") { // This is a service-worker format worker. for (const module of Object.values([...(modules || [])])) { diff --git a/packages/wrangler/src/deployment-bundle/dedupe-modules.ts b/packages/wrangler/src/deployment-bundle/dedupe-modules.ts new file mode 100644 index 000000000000..4a81f8f9b7c2 --- /dev/null +++ b/packages/wrangler/src/deployment-bundle/dedupe-modules.ts @@ -0,0 +1,15 @@ +import type { CfModule } from "./worker"; + +/** + * Remove duplicate modules from the array. + * + * Prefer modules towards the end of the array in the case of a collision by name. + */ +export function dedupeModulesByName(modules: CfModule[]): CfModule[] { + return Object.values( + modules.reduce((moduleMap, module) => { + moduleMap[module.name] = module; + return moduleMap; + }, {} as Record) + ); +} diff --git a/packages/wrangler/src/deployment-bundle/find-additional-modules.ts b/packages/wrangler/src/deployment-bundle/find-additional-modules.ts new file mode 100644 index 000000000000..c74c2e1a2dbd --- /dev/null +++ b/packages/wrangler/src/deployment-bundle/find-additional-modules.ts @@ -0,0 +1,153 @@ +import { mkdir, readdir, readFile, writeFile } from "node:fs/promises"; +import path from "node:path"; +import chalk from "chalk"; +import globToRegExp from "glob-to-regexp"; +import { logger } from "../logger"; +import { RuleTypeToModuleType } from "./module-collection"; +import { parseRules } from "./rules"; +import type { Rule } from "../config/environment"; +import type { Entry } from "./entry"; +import type { ParsedRules } from "./rules"; +import type { CfModule } from "./worker"; + +async function* getFiles( + root: string, + relativeTo: string +): AsyncGenerator { + for (const file of await readdir(root, { withFileTypes: true })) { + if (file.isDirectory()) { + yield* getFiles(path.join(root, file.name), relativeTo); + } else { + // Module names should always use `/`. This is also required to match globs correctly on Windows. 
Later code will + // `path.resolve()` with these names to read contents which will perform appropriate normalisation. + yield path + .relative(relativeTo, path.join(root, file.name)) + .replaceAll("\\", "/"); + } + } +} + +/** + * Search the filesystem under the `moduleRoot` of the `entry` for potential additional modules + * that match the given `rules`. + */ +export async function findAdditionalModules( + entry: Entry, + rules: Rule[] | ParsedRules +): Promise { + const files = getFiles(entry.moduleRoot, entry.moduleRoot); + const relativeEntryPoint = path + .relative(entry.moduleRoot, entry.file) + .replaceAll("\\", "/"); + + if (Array.isArray(rules)) rules = parseRules(rules); + const modules = (await matchFiles(files, entry.moduleRoot, rules)) + .filter((m) => m.name !== relativeEntryPoint) + .map((m) => ({ + ...m, + name: m.name, + })); + + if (modules.length > 0) { + logger.info(`Attaching additional modules:`); + modules.forEach(({ name, type }) => { + logger.info(`- ${chalk.blue(name)} (${chalk.green(type ?? "")})`); + }); + } + + return modules; +} + +async function matchFiles( + files: AsyncGenerator, + relativeTo: string, + { rules, removedRules }: ParsedRules +) { + const modules: CfModule[] = []; + + // Use the `moduleNames` set to deduplicate modules. + // This is usually a poorly specified `wrangler.toml` configuration, but duplicate modules will cause a crash at runtime + const moduleNames = new Set(); + + for await (const filePath of files) { + for (const rule of rules) { + for (const glob of rule.globs) { + const regexp = globToRegExp(glob, { + globstar: true, + }); + if (!regexp.test(filePath)) { + continue; + } + const fileContent = await readFile(path.join(relativeTo, filePath)); + + const module = { + name: filePath, + content: fileContent, + filePath, + type: RuleTypeToModuleType[rule.type], + }; + + if (!moduleNames.has(module.name)) { + moduleNames.add(module.name); + modules.push(module); + } else { + logger.warn( + `Ignoring duplicate module: ${chalk.blue( + module.name + )} (${chalk.green(module.type ?? "")})` + ); + } + } + } + + // This is just a sanity check verifying that no files match rules that were removed + for (const rule of removedRules) { + for (const glob of rule.globs) { + const regexp = globToRegExp(glob); + if (regexp.test(filePath)) { + throw new Error( + `The file ${filePath} matched a module rule in your configuration (${JSON.stringify( + rule + )}), but was ignored because a previous rule with the same type was not marked as \`fallthrough = true\`.` + ); + } + } + } + } + + return modules; +} + +/** + * Recursively finds all directories contained within and including `root`, + * that should be watched for additional modules. Excludes `node_modules` and + * `.git` folders in case the root is the project root, to avoid watching too + * much. + */ +export async function* findAdditionalModuleWatchDirs( + root: string +): AsyncGenerator { + yield root; + for (const entry of await readdir(root, { withFileTypes: true })) { + if (entry.isDirectory()) { + if (entry.name === "node_modules" || entry.name === ".git") continue; + yield* findAdditionalModuleWatchDirs(path.join(root, entry.name)); + } + } +} + +/** + * When we are writing files to an `outDir`, this function ensures that any additional + * modules that were found (by matching rules) are also copied to the destination directory. 
+ */ +export async function writeAdditionalModules( + modules: CfModule[], + destination: string +): Promise { + for (const module of modules) { + const modulePath = path.resolve(destination, module.name); + logger.debug("Writing additional module to output", modulePath); + await mkdir(path.dirname(modulePath), { recursive: true }); + await writeFile(modulePath, module.content); + } +} diff --git a/packages/wrangler/src/module-collection.ts b/packages/wrangler/src/deployment-bundle/module-collection.ts similarity index 52% rename from packages/wrangler/src/module-collection.ts rename to packages/wrangler/src/deployment-bundle/module-collection.ts index d4d3df59d555..a950068ecf85 100644 --- a/packages/wrangler/src/module-collection.ts +++ b/packages/wrangler/src/deployment-bundle/module-collection.ts @@ -1,15 +1,17 @@ import crypto from "node:crypto"; +import { readdirSync } from "node:fs"; import { readFile } from "node:fs/promises"; import path from "node:path"; -import chalk from "chalk"; import globToRegExp from "glob-to-regexp"; -import { logger } from "./logger"; -import type { Config, ConfigModuleRuleType } from "./config"; -import type { - CfModule, - CfModuleType, - CfScriptFormat, -} from "./deployment-bundle/worker"; +import { logger } from "../logger"; +import { + findAdditionalModules, + findAdditionalModuleWatchDirs, +} from "./find-additional-modules"; +import { isJavaScriptModuleRule, parseRules } from "./rules"; +import type { Config, ConfigModuleRuleType } from "../config"; +import type { Entry } from "./entry"; +import type { CfModule, CfModuleType } from "./worker"; import type esbuild from "esbuild"; function flipObject< @@ -19,13 +21,14 @@ function flipObject< return Object.fromEntries(Object.entries(obj).map(([k, v]) => [v, k])); } -const RuleTypeToModuleType: Record = { - ESModule: "esm", - CommonJS: "commonjs", - CompiledWasm: "compiled-wasm", - Data: "buffer", - Text: "text", -}; +export const RuleTypeToModuleType: Record = + { + ESModule: "esm", + CommonJS: "commonjs", + CompiledWasm: "compiled-wasm", + Data: "buffer", + Text: "text", + }; export const ModuleTypeToRuleType = flipObject(RuleTypeToModuleType); @@ -37,140 +40,43 @@ export const ModuleTypeToRuleType = flipObject(RuleTypeToModuleType); // plugin+array is used to collect references to these modules, reference // them correctly in the bundle, and add them to the form upload. -export const DEFAULT_MODULE_RULES: Config["rules"] = [ - { type: "Text", globs: ["**/*.txt", "**/*.html"] }, - { type: "Data", globs: ["**/*.bin"] }, - { type: "CompiledWasm", globs: ["**/*.wasm", "**/*.wasm?module"] }, -]; - -export function parseRules(userRules: Config["rules"] = []) { - const rules: Config["rules"] = [...userRules, ...DEFAULT_MODULE_RULES]; - - const completedRuleLocations: Record = {}; - let index = 0; - const rulesToRemove: Config["rules"] = []; - for (const rule of rules) { - if (rule.type in completedRuleLocations) { - if (rules[completedRuleLocations[rule.type]].fallthrough !== false) { - if (index < userRules.length) { - logger.warn( - `The module rule at position ${index} (${JSON.stringify( - rule - )}) has the same type as a previous rule (at position ${ - completedRuleLocations[rule.type] - }, ${JSON.stringify( - rules[completedRuleLocations[rule.type]] - )}). This rule will be ignored. 
To the previous rule, add \`fallthrough = true\` to allow this one to also be used, or \`fallthrough = false\` to silence this warning.` - ); - } else { - logger.warn( - `The default module rule ${JSON.stringify( - rule - )} has the same type as a previous rule (at position ${ - completedRuleLocations[rule.type] - }, ${JSON.stringify( - rules[completedRuleLocations[rule.type]] - )}). This rule will be ignored. To the previous rule, add \`fallthrough = true\` to allow the default one to also be used, or \`fallthrough = false\` to silence this warning.` - ); - } - } - - rulesToRemove.push(rule); - } - if (!(rule.type in completedRuleLocations) && rule.fallthrough !== true) { - completedRuleLocations[rule.type] = index; - } - index++; - } - - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - rulesToRemove.forEach((rule) => rules!.splice(rules!.indexOf(rule), 1)); - - return { rules, removedRules: rulesToRemove }; -} +export type ModuleCollector = { + modules: CfModule[]; + plugin: esbuild.Plugin; +}; -export async function matchFiles( - files: string[], - relativeTo: string, - { - rules, - removedRules, - }: { rules: Config["rules"]; removedRules: Config["rules"] } -) { - const modules: CfModule[] = []; +const modulesWatchRegexp = /^wrangler:modules-watch$/; +const modulesWatchNamespace = "wrangler-modules-watch"; - // Deduplicate modules. This is usually a poorly specified `wrangler.toml` configuration, but duplicate modules will cause a crash at runtime - const moduleNames = new Set(); - for (const rule of rules) { - for (const glob of rule.globs) { - const regexp = globToRegExp(glob, { - globstar: true, +export const noopModuleCollector: ModuleCollector = { + modules: [], + plugin: { + name: "wrangler-module-collector", + setup: (build) => { + build.onResolve({ filter: modulesWatchRegexp }, (args) => { + return { namespace: modulesWatchNamespace, path: args.path }; }); - const newModules = await Promise.all( - files - .filter((f) => regexp.test(f)) - .map(async (name) => { - const filePath = path.join(relativeTo, name); - const fileContent = await readFile(filePath); - - return { - name, - filePath, - content: fileContent, - type: RuleTypeToModuleType[rule.type], - }; - }) + build.onLoad( + { namespace: modulesWatchNamespace, filter: modulesWatchRegexp }, + () => ({ contents: "", loader: "js" }) ); - for (const module of newModules) { - if (!moduleNames.has(module.name)) { - moduleNames.add(module.name); - modules.push(module); - } else { - logger.warn( - `Ignoring duplicate module: ${chalk.blue( - module.name - )} (${chalk.green(module.type ?? 
"")})` - ); - } - } - } - } - - // This is just a sanity check verifying that no files match rules that were removed - for (const rule of removedRules) { - for (const glob of rule.globs) { - const regexp = globToRegExp(glob); - for (const file of files) { - if (regexp.test(file)) { - throw new Error( - `The file ${file} matched a module rule in your configuration (${JSON.stringify( - rule - )}), but was ignored because a previous rule with the same type was not marked as \`fallthrough = true\`.` - ); - } - } - } - } - return modules; -} - -export type ModuleCollector = { - modules: CfModule[]; - plugin: esbuild.Plugin; + }, + }, }; -export default function createModuleCollector(props: { - format: CfScriptFormat; +export function createModuleCollector(props: { + entry: Entry; + findAdditionalModules: boolean; rules?: Config["rules"]; // a collection of "legacy" style module references, which are just file names // we will eventually deprecate this functionality, hence the verbose greppable name - wrangler1xlegacyModuleReferences: { + wrangler1xLegacyModuleReferences?: { rootDirectory: string; fileNames: Set; }; preserveFileNames?: boolean; }): ModuleCollector { - const { rules, removedRules } = parseRules(props.rules); + const parsedRules = parseRules(props.rules); const modules: CfModule[] = []; return { @@ -178,17 +84,70 @@ export default function createModuleCollector(props: { plugin: { name: "wrangler-module-collector", setup(build) { - build.onStart(() => { + let foundModulePaths: string[] = []; + + build.onStart(async () => { // reset the module collection array modules.splice(0); + + if (props.findAdditionalModules) { + // Make sure we're not bundling a service worker + if (props.entry.format !== "modules") { + const error = + "`find_additional_modules` can only be used with an ES module entrypoint.\n" + + "Remove `find_additional_modules = true` from your configuration, " + + "or migrate to the ES module Worker format: " + + "https://developers.cloudflare.com/workers/learning/migrate-to-module-workers/"; + return { errors: [{ text: error }] }; + } + + const found = await findAdditionalModules(props.entry, parsedRules); + foundModulePaths = found.map(({ name }) => + path.resolve(props.entry.moduleRoot, name) + ); + modules.push(...found); + } + }); + + // `esbuild` doesn't support returning `watch*` options from `onStart()` + // callbacks. Instead, we define an empty virtual module that is + // imported in an injected module. Importing this module registers the + // required watchers. + + build.onResolve({ filter: modulesWatchRegexp }, (args) => { + return { namespace: modulesWatchNamespace, path: args.path }; }); + build.onLoad( + { namespace: modulesWatchNamespace, filter: modulesWatchRegexp }, + async () => { + let watchFiles: string[] = []; + const watchDirs: string[] = []; + if (props.findAdditionalModules) { + // Watch files to rebuild when they're changed/deleted. Note we + // could watch additional modules when we import them, but this + // doesn't cover dynamically imported modules with variable paths + // (e.g. await import(`./lang/${language}.js`)). + watchFiles = foundModulePaths; + + // Watch directories to rebuild when *new* files are added. 
+ // Note watching directories doesn't watch their subdirectories + // or file contents: https://esbuild.github.io/plugins/#on-load-results + const root = path.resolve(props.entry.moduleRoot); + for await (const dir of findAdditionalModuleWatchDirs(root)) { + watchDirs.push(dir); + } + } + + return { contents: "", loader: "js", watchFiles, watchDirs }; + } + ); // ~ start legacy module specifier support ~ // This section detects usage of "legacy" 1.x style module specifiers // and modifies them so they "work" in wrangler v2, but with a warning - const rulesMatchers = rules.flatMap((rule) => { + const rulesMatchers = parsedRules.rules.flatMap((rule) => { return rule.globs.map((glob) => { const regex = globToRegExp(glob); return { @@ -198,12 +157,15 @@ export default function createModuleCollector(props: { }); }); - if (props.wrangler1xlegacyModuleReferences.fileNames.size > 0) { + if ( + props.wrangler1xLegacyModuleReferences && + props.wrangler1xLegacyModuleReferences.fileNames.size > 0 + ) { build.onResolve( { filter: new RegExp( "^(" + - [...props.wrangler1xlegacyModuleReferences.fileNames] + [...props.wrangler1xLegacyModuleReferences.fileNames] .map((name) => name.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")) .join("|") + ")$" @@ -229,7 +191,8 @@ export default function createModuleCollector(props: { // take the file and massage it to a // transportable/manageable format const filePath = path.join( - props.wrangler1xlegacyModuleReferences.rootDirectory, + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + props.wrangler1xLegacyModuleReferences!.rootDirectory, args.path ); const fileContent = await readFile(filePath); @@ -251,7 +214,7 @@ export default function createModuleCollector(props: { }); return { path: fileName, // change the reference to the changed module - external: props.format === "modules", // mark it as external in the bundle + external: props.entry.format === "modules", // mark it as external in the bundle namespace: `wrangler-module-${rule.type}`, // just a tag, this isn't strictly necessary watchFiles: [filePath], // we also add the file to esbuild's watch list }; @@ -262,8 +225,10 @@ export default function createModuleCollector(props: { // ~ end legacy module specifier support ~ - rules?.forEach((rule) => { - if (rule.type === "ESModule" || rule.type === "CommonJS") return; // TODO: we should treat these as js files, and use the jsx loader + parsedRules.rules?.forEach((rule) => { + if (!props.findAdditionalModules && isJavaScriptModuleRule(rule)) { + return; + } rule.globs.forEach((glob) => { build.onResolve( @@ -273,6 +238,19 @@ export default function createModuleCollector(props: { // transportable/manageable format const filePath = path.join(args.resolveDir, args.path); + + // If this was a found additional module, mark it as external. + // Note, there's no need to watch the file here as we already + // watch all `foundModulePaths` with `wrangler:modules-watch`. + if (foundModulePaths.includes(filePath)) { + return { path: args.path, external: true }; + } + // For JavaScript module rules, we only register this onResolve + // callback if `findAdditionalModules` is true. If we didn't + // find the module in `modules` in the above `if` block, leave + // it to `esbuild` to bundle it. 
+ if (isJavaScriptModuleRule(rule)) return; + const fileContent = await readFile(filePath); const fileHash = crypto .createHash("sha1") @@ -292,14 +270,14 @@ export default function createModuleCollector(props: { return { path: fileName, // change the reference to the changed module - external: props.format === "modules", // mark it as external in the bundle + external: props.entry.format === "modules", // mark it as external in the bundle namespace: `wrangler-module-${rule.type}`, // just a tag, this isn't strictly necessary watchFiles: [filePath], // we also add the file to esbuild's watch list }; } ); - if (props.format === "service-worker") { + if (props.entry.format === "service-worker") { build.onLoad( { filter: globToRegExp(glob) }, async (args: esbuild.OnLoadArgs) => { @@ -320,7 +298,7 @@ export default function createModuleCollector(props: { }); }); - removedRules.forEach((rule) => { + parsedRules.removedRules.forEach((rule) => { rule.globs.forEach((glob) => { build.onResolve( { filter: globToRegExp(glob) }, @@ -340,3 +318,20 @@ export default function createModuleCollector(props: { }, }; } + +export function getWrangler1xLegacyModuleReferences( + rootDirectory: string, + entryPath: string +) { + return { + rootDirectory, + fileNames: new Set( + readdirSync(rootDirectory, { withFileTypes: true }) + .filter( + (dirEntry) => + dirEntry.isFile() && dirEntry.name !== path.basename(entryPath) + ) + .map((dirEnt) => dirEnt.name) + ), + }; +} diff --git a/packages/wrangler/src/deployment-bundle/rules.ts b/packages/wrangler/src/deployment-bundle/rules.ts new file mode 100644 index 000000000000..7a1be3b85540 --- /dev/null +++ b/packages/wrangler/src/deployment-bundle/rules.ts @@ -0,0 +1,63 @@ +import { logger } from "../logger"; +import type { Rule } from "../config/environment"; + +export function isJavaScriptModuleRule(rule: Rule) { + return rule.type === "ESModule" || rule.type === "CommonJS"; +} + +export const DEFAULT_MODULE_RULES: Rule[] = [ + { type: "Text", globs: ["**/*.txt", "**/*.html"] }, + { type: "Data", globs: ["**/*.bin"] }, + { type: "CompiledWasm", globs: ["**/*.wasm", "**/*.wasm?module"] }, +]; + +export interface ParsedRules { + rules: Rule[]; + removedRules: Rule[]; +} + +export function parseRules(userRules: Rule[] = []): ParsedRules { + const rules: Rule[] = [...userRules, ...DEFAULT_MODULE_RULES]; + + const completedRuleLocations: Record = {}; + let index = 0; + const rulesToRemove: Rule[] = []; + for (const rule of rules) { + if (rule.type in completedRuleLocations) { + if (rules[completedRuleLocations[rule.type]].fallthrough !== false) { + if (index < userRules.length) { + logger.warn( + `The module rule at position ${index} (${JSON.stringify( + rule + )}) has the same type as a previous rule (at position ${ + completedRuleLocations[rule.type] + }, ${JSON.stringify( + rules[completedRuleLocations[rule.type]] + )}). This rule will be ignored. To use the previous rule, add \`fallthrough = true\` to allow this one to also be used, or \`fallthrough = false\` to silence this warning.` + ); + } else { + logger.warn( + `The default module rule ${JSON.stringify( + rule + )} has the same type as a previous rule (at position ${ + completedRuleLocations[rule.type] + }, ${JSON.stringify( + rules[completedRuleLocations[rule.type]] + )}). This rule will be ignored. 
To use the previous rule, add \`fallthrough = true\` to allow the default one to also be used, or \`fallthrough = false\` to silence this warning.` + ); + } + } + + rulesToRemove.push(rule); + } + if (!(rule.type in completedRuleLocations) && rule.fallthrough !== true) { + completedRuleLocations[rule.type] = index; + } + index++; + } + + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + rulesToRemove.forEach((rule) => rules!.splice(rules!.indexOf(rule), 1)); + + return { rules, removedRules: rulesToRemove }; +} diff --git a/packages/wrangler/src/deployment-bundle/traverse-module-graph.ts b/packages/wrangler/src/deployment-bundle/traverse-module-graph.ts deleted file mode 100644 index a2dac5508ff9..000000000000 --- a/packages/wrangler/src/deployment-bundle/traverse-module-graph.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { readdir } from "node:fs/promises"; -import path from "node:path"; -import chalk from "chalk"; -import { logger } from "../logger"; -import { matchFiles, parseRules } from "../module-collection"; -import type { Config } from "../config"; -import type { BundleResult } from "./bundle"; -import type { Entry } from "./entry"; - -async function getFiles(root: string, relativeTo: string): Promise { - const files = []; - for (const file of await readdir(root, { withFileTypes: true })) { - if (file.isDirectory()) { - files.push(...(await getFiles(path.join(root, file.name), relativeTo))); - } else { - // Module names should always use `/`. This is also required to match globs correctly on Windows. Later code will - // `path.resolve()` with these names to read contents which will perform appropriate normalisation. - files.push( - path - .relative(relativeTo, path.join(root, file.name)) - .replaceAll("\\", "/") - ); - } - } - return files; -} - -export default async function traverseModuleGraph( - entry: Entry, - rules: Config["rules"] -): Promise { - const files = await getFiles(entry.moduleRoot, entry.moduleRoot); - const relativeEntryPoint = path - .relative(entry.moduleRoot, entry.file) - .replaceAll("\\", "/"); - - const modules = (await matchFiles(files, entry.moduleRoot, parseRules(rules))) - .filter((m) => m.name !== relativeEntryPoint) - .map((m) => ({ - ...m, - name: m.name, - })); - - const bundleType = entry.format === "modules" ? "esm" : "commonjs"; - - if (modules.length > 0) { - logger.info(`Attaching additional modules:`); - modules.forEach(({ name, type }) => { - logger.info(`- ${chalk.blue(name)} (${chalk.green(type ?? "")})`); - }); - } - - return { - modules, - dependencies: {}, - resolvedEntryPointPath: entry.file, - bundleType, - stop: undefined, - sourceMapPath: undefined, - sourceMapMetadata: undefined, - moduleCollector: undefined, - }; -} diff --git a/packages/wrangler/src/dev.tsx b/packages/wrangler/src/dev.tsx index 0a26a2cf1ab9..ffc930179f3f 100644 --- a/packages/wrangler/src/dev.tsx +++ b/packages/wrangler/src/dev.tsx @@ -414,6 +414,7 @@ export async function startDev(args: StartDevOptions) { { if (!destination) return; - let traverseModuleGraphResult: - | Awaited> - | undefined; - let bundleResult: Awaited> | undefined; if (noBundle) { - traverseModuleGraphResult = await traverseModuleGraph(entry, rules); + additionalModules = dedupeModulesByName([ + ...((await doFindAdditionalModules(entry, rules)) ?? 
[]), + ...additionalModules, + ]); } - if (processEntrypoint || !noBundle) { - bundleResult = await bundleWorker(entry, destination, { - bundle: !noBundle, - disableModuleCollection: noBundle, - serveAssetsFromWorker, - jsxFactory, - jsxFragment, - rules, - tsconfig, - minify, - legacyNodeCompat, - nodejsCompat, - define, - checkFetch: true, - assets: assets && { - ...assets, - // disable the cache in dev - bypassCache: true, - }, - workerDefinitions, - services, - targetConsumer: "dev", // We are starting a dev server - local, - testScheduled, - doBindings, - additionalModules: dedupeModulesByName([ - ...(traverseModuleGraphResult?.modules ?? []), - ...additionalModules, - ]), - }); - } + const entryDirectory = path.dirname(entry.file); + const moduleCollector = noBundle + ? noopModuleCollector + : createModuleCollector({ + wrangler1xLegacyModuleReferences: getWrangler1xLegacyModuleReferences( + entryDirectory, + entry.file + ), + entry, + findAdditionalModules: findAdditionalModules ?? false, + rules, + }); + + const bundleResult = + processEntrypoint || !noBundle + ? await bundleWorker(entry, destination, { + bundle: !noBundle, + additionalModules, + moduleCollector, + serveAssetsFromWorker, + jsxFactory, + jsxFragment, + tsconfig, + minify, + legacyNodeCompat, + nodejsCompat, + define, + checkFetch: true, + assets, + // disable the cache in dev + bypassAssetCache: true, + workerDefinitions, + services, + targetConsumer: "dev", // We are starting a dev server + local, + testScheduled, + doBindings, + }) + : undefined; return { id: 0, entry, path: bundleResult?.resolvedEntryPointPath ?? entry.file, - type: - bundleResult?.bundleType ?? - (entry.format === "modules" ? "esm" : "commonjs"), - modules: bundleResult - ? bundleResult.modules - : dedupeModulesByName([ - ...(traverseModuleGraphResult?.modules ?? []), - ...additionalModules, - ]), + type: bundleResult?.bundleType ?? getBundleType(entry.format), + modules: bundleResult ? bundleResult.modules : additionalModules, dependencies: bundleResult?.dependencies ?? 
{}, sourceMapPath: bundleResult?.sourceMapPath, sourceMapMetadata: bundleResult?.sourceMapMetadata, diff --git a/packages/wrangler/src/dev/use-esbuild.ts b/packages/wrangler/src/dev/use-esbuild.ts index fec9da96595a..808e9079b38e 100644 --- a/packages/wrangler/src/dev/use-esbuild.ts +++ b/packages/wrangler/src/dev/use-esbuild.ts @@ -1,38 +1,37 @@ import assert from "node:assert"; +import path from "node:path"; import { watch } from "chokidar"; import { useApp } from "ink"; import { useState, useEffect } from "react"; +import { rewriteNodeCompatBuildFailure } from "../deployment-bundle/build-failures"; +import { bundleWorker } from "../deployment-bundle/bundle"; +import { getBundleType } from "../deployment-bundle/bundle-type"; +import { dedupeModulesByName } from "../deployment-bundle/dedupe-modules"; +import { findAdditionalModules as doFindAdditionalModules } from "../deployment-bundle/find-additional-modules"; import { - bundleWorker, - dedupeModulesByName, - rewriteNodeCompatBuildFailure, -} from "../deployment-bundle/bundle"; -import traverseModuleGraph from "../deployment-bundle/traverse-module-graph"; + createModuleCollector, + noopModuleCollector, + getWrangler1xLegacyModuleReferences, +} from "../deployment-bundle/module-collection"; import { logBuildFailure, logBuildWarnings } from "../logger"; import type { Config } from "../config"; +import type { SourceMapMetadata } from "../deployment-bundle/bundle"; import type { Entry } from "../deployment-bundle/entry"; -import type { CfModule } from "../deployment-bundle/worker"; +import type { CfModule, CfModuleType } from "../deployment-bundle/worker"; import type { WorkerRegistry } from "../dev-registry"; -import type { SourceMapMetadata } from "../inspect"; -import type { ModuleCollector } from "../module-collection"; import type { Metafile, BuildResult, PluginBuild } from "esbuild"; export type EsbuildBundle = { id: number; path: string; entry: Entry; - type: "esm" | "commonjs"; + type: CfModuleType; modules: CfModule[]; dependencies: Metafile["outputs"][string]["inputs"]; sourceMapPath: string | undefined; sourceMapMetadata: SourceMapMetadata | undefined; }; -export type BundleInfo = { - bundle: EsbuildBundle; - moduleCollector: ModuleCollector | undefined; -}; - export function useEsbuild({ entry, destination, @@ -49,6 +48,7 @@ export function useEsbuild({ nodejsCompat, define, noBundle, + findAdditionalModules, workerDefinitions, services, durableObjects, @@ -73,6 +73,7 @@ export function useEsbuild({ legacyNodeCompat: boolean | undefined; nodejsCompat: boolean | undefined; noBundle: boolean; + findAdditionalModules: boolean | undefined; workerDefinitions: WorkerRegistry; durableObjects: Config["durable_objects"]; local: boolean; @@ -80,24 +81,47 @@ export function useEsbuild({ testScheduled: boolean; experimentalLocal: boolean | undefined; }): EsbuildBundle | undefined { - const [bundleInfo, setBundleInfo] = useState(); + const [bundle, setBundle] = useState(); const { exit } = useApp(); useEffect(() => { let stopWatching: (() => void) | undefined = undefined; - function updateBundle() { + const entryDirectory = path.dirname(entry.file); + const moduleCollector = noBundle + ? noopModuleCollector + : createModuleCollector({ + wrangler1xLegacyModuleReferences: getWrangler1xLegacyModuleReferences( + entryDirectory, + entry.file + ), + entry, + findAdditionalModules: findAdditionalModules ?? false, + rules: rules, + }); + + async function getAdditionalModules() { + return noBundle + ? 
dedupeModulesByName([ + ...((await doFindAdditionalModules(entry, rules)) ?? []), + ...additionalModules, + ]) + : additionalModules; + } + + async function updateBundle() { + const newAdditionalModules = await getAdditionalModules(); // nothing really changes here, so let's increment the id // to change the return object's identity - setBundleInfo((previousBundle) => { + setBundle((previousBundle) => { assert( previousBundle, "Rebuild triggered with no previous build available" ); - previousBundle.bundle.modules = dedupeModulesByName([ - ...previousBundle.bundle.modules, - ...(previousBundle.moduleCollector?.modules ?? []), + previousBundle.modules = dedupeModulesByName([ + ...(moduleCollector?.modules ?? []), + ...newAdditionalModules, ]); - return { ...previousBundle, id: previousBundle.bundle.id + 1 }; + return { ...previousBundle, id: previousBundle.id + 1 }; }); } @@ -105,7 +129,7 @@ export function useEsbuild({ const onEnd = { name: "on-end", setup(b: PluginBuild) { - b.onEnd((result: BuildResult) => { + b.onEnd(async (result: BuildResult) => { const errors = result.errors; const warnings = result.warnings; if (errors.length > 0) { @@ -114,15 +138,15 @@ export function useEsbuild({ return; } + if (warnings.length > 0) { + logBuildWarnings(warnings); + } + if (!bundled) { // First bundle, no need to update bundle bundled = true; } else { - updateBundle(); - } - - if (warnings.length > 0) { - logBuildWarnings(warnings); + await updateBundle(); } }); }, @@ -131,47 +155,35 @@ export function useEsbuild({ async function build() { if (!destination) return; - let traverseModuleGraphResult: - | Awaited> - | undefined; - let bundleResult: Awaited> | undefined; - if (noBundle) { - traverseModuleGraphResult = await traverseModuleGraph(entry, rules); - } - - if (processEntrypoint || !noBundle) { - bundleResult = await bundleWorker(entry, destination, { - bundle: !noBundle, - disableModuleCollection: noBundle, - serveAssetsFromWorker, - jsxFactory, - jsxFragment, - rules, - watch: true, - tsconfig, - minify, - legacyNodeCompat, - nodejsCompat, - doBindings: durableObjects.bindings, - define, - checkFetch: true, - assets: assets && { - ...assets, - // disable the cache in dev - bypassCache: true, - }, - workerDefinitions, - services, - targetConsumer, - testScheduled, - additionalModules: dedupeModulesByName([ - ...(traverseModuleGraphResult?.modules ?? []), - ...additionalModules, - ]), - plugins: [onEnd], - local, - }); - } + const newAdditionalModules = await getAdditionalModules(); + const bundleResult = + processEntrypoint || !noBundle + ? await bundleWorker(entry, destination, { + bundle: !noBundle, + moduleCollector, + additionalModules: newAdditionalModules, + serveAssetsFromWorker, + jsxFactory, + jsxFragment, + watch: true, + tsconfig, + minify, + legacyNodeCompat, + nodejsCompat, + doBindings: durableObjects.bindings, + define, + checkFetch: true, + assets, + // disable the cache in dev + bypassAssetCache: true, + workerDefinitions, + services, + targetConsumer, + testScheduled, + plugins: [onEnd], + local, + }) + : undefined; // Capture the `stop()` method to use as the `useEffect()` destructor. stopWatching = bundleResult?.stop; @@ -182,33 +194,22 @@ export function useEsbuild({ const watcher = watch(entry.file, { persistent: true, }).on("change", async (_event) => { - updateBundle(); + await updateBundle(); }); stopWatching = () => { void watcher.close(); }; } - - setBundleInfo({ - bundle: { - id: 0, - entry, - path: bundleResult?.resolvedEntryPointPath ?? 
entry.file, - type: - bundleResult?.bundleType ?? - (entry.format === "modules" ? "esm" : "commonjs"), - modules: bundleResult - ? bundleResult.modules - : dedupeModulesByName([ - ...(traverseModuleGraphResult?.modules ?? []), - ...additionalModules, - ]), - dependencies: bundleResult?.dependencies ?? {}, - sourceMapPath: bundleResult?.sourceMapPath, - sourceMapMetadata: bundleResult?.sourceMapMetadata, - }, - moduleCollector: bundleResult?.moduleCollector, + setBundle({ + id: 0, + entry, + path: bundleResult?.resolvedEntryPointPath ?? entry.file, + type: bundleResult?.bundleType ?? getBundleType(entry.format), + modules: bundleResult ? bundleResult.modules : newAdditionalModules, + dependencies: bundleResult?.dependencies ?? {}, + sourceMapPath: bundleResult?.sourceMapPath, + sourceMapMetadata: bundleResult?.sourceMapMetadata, }); } @@ -234,6 +235,7 @@ export function useEsbuild({ tsconfig, exit, noBundle, + findAdditionalModules, minify, legacyNodeCompat, nodejsCompat, @@ -247,5 +249,5 @@ export function useEsbuild({ testScheduled, experimentalLocal, ]); - return bundleInfo?.bundle; + return bundle; } diff --git a/packages/wrangler/src/index.ts b/packages/wrangler/src/index.ts index 1ff075ea54cf..5ef5f9e22760 100644 --- a/packages/wrangler/src/index.ts +++ b/packages/wrangler/src/index.ts @@ -12,7 +12,7 @@ import { d1 } from "./d1"; import { deleteHandler, deleteOptions } from "./delete"; import { deployOptions, deployHandler } from "./deploy"; import { isAuthenticationError } from "./deploy/deploy"; -import { isBuildFailure } from "./deployment-bundle/bundle"; +import { isBuildFailure } from "./deployment-bundle/build-failures"; import { deployments, commonDeploymentCMDSetup, diff --git a/packages/wrangler/src/pages/build.ts b/packages/wrangler/src/pages/build.ts index 74f1904385a0..e50f288c5053 100644 --- a/packages/wrangler/src/pages/build.ts +++ b/packages/wrangler/src/pages/build.ts @@ -1,6 +1,7 @@ import { existsSync, lstatSync, mkdirSync, writeFileSync } from "node:fs"; import { basename, dirname, relative, resolve as resolvePath } from "node:path"; import { createUploadWorkerBundleContents } from "../api/pages/create-worker-bundle-contents"; +import { writeAdditionalModules } from "../deployment-bundle/find-additional-modules"; import { FatalError } from "../errors"; import { logger } from "../logger"; import * as metrics from "../metrics"; @@ -252,6 +253,10 @@ export const Handler = async (args: PagesBuildArgs) => { } } + if (outdir) { + await writeAdditionalModules(bundle.modules, outdir); + } + if (outfile) { const workerBundleContents = await createUploadWorkerBundleContents( bundle as BundleResult diff --git a/packages/wrangler/src/pages/buildFunctions.ts b/packages/wrangler/src/pages/buildFunctions.ts index b04c2cd5a840..c1d2fc8abdac 100644 --- a/packages/wrangler/src/pages/buildFunctions.ts +++ b/packages/wrangler/src/pages/buildFunctions.ts @@ -3,8 +3,8 @@ import { join, resolve } from "node:path"; import { FatalError } from "../errors"; import { toUrlPath } from "../paths"; import { FunctionsNoRoutesError } from "./errors"; -import { buildPlugin } from "./functions/buildPlugin"; -import { buildWorker } from "./functions/buildWorker"; +import { buildPluginFromFunctions } from "./functions/buildPlugin"; +import { buildWorkerFromFunctions } from "./functions/buildWorker"; import { generateConfigFromFileTree } from "./functions/filepath-routing"; import { writeRoutesModule } from "./functions/routes"; import { convertRoutesToRoutesJSONSpec } from 
"./functions/routes-transformation"; @@ -105,7 +105,7 @@ export async function buildFunctions({ ); } - bundle = await buildPlugin({ + bundle = await buildPluginFromFunctions({ routesModule, outdir, minify, @@ -116,7 +116,7 @@ export async function buildFunctions({ local, }); } else { - bundle = await buildWorker({ + bundle = await buildWorkerFromFunctions({ routesModule, outfile, outdir, diff --git a/packages/wrangler/src/pages/dev.ts b/packages/wrangler/src/pages/dev.ts index 68a75b4878bf..a0eb5acf952e 100644 --- a/packages/wrangler/src/pages/dev.ts +++ b/packages/wrangler/src/pages/dev.ts @@ -5,7 +5,7 @@ import { join, resolve } from "node:path"; import { watch } from "chokidar"; import * as esbuild from "esbuild"; import { unstable_dev } from "../api"; -import { isBuildFailure } from "../deployment-bundle/bundle"; +import { isBuildFailure } from "../deployment-bundle/build-failures"; import { esbuildAliasExternalPlugin } from "../deployment-bundle/esbuild-plugins/alias-external"; import { FatalError } from "../errors"; import { logger } from "../logger"; diff --git a/packages/wrangler/src/pages/functions/buildPlugin.ts b/packages/wrangler/src/pages/functions/buildPlugin.ts index 7f4478d9ec63..680e1a4787fe 100644 --- a/packages/wrangler/src/pages/functions/buildPlugin.ts +++ b/packages/wrangler/src/pages/functions/buildPlugin.ts @@ -1,8 +1,10 @@ import { access, lstat } from "node:fs/promises"; import { relative, resolve } from "node:path"; import { bundleWorker } from "../../deployment-bundle/bundle"; +import { createModuleCollector } from "../../deployment-bundle/module-collection"; import { getBasePath } from "../../paths"; import { buildNotifierPlugin } from "./buildWorker"; +import type { Entry } from "../../deployment-bundle/entry"; import type { Options as WorkerOptions } from "./buildWorker"; type Options = Omit< @@ -10,7 +12,7 @@ type Options = Omit< "outfile" | "fallbackService" | "buildOutputDirectory" | "nodejsCompat" > & { outdir: string }; -export function buildPlugin({ +export function buildPluginFromFunctions({ routesModule, outdir, minify = false, @@ -21,88 +23,87 @@ export function buildPlugin({ functionsDirectory, local, }: Options) { - return bundleWorker( - { - file: resolve(getBasePath(), "templates/pages-template-plugin.ts"), - directory: functionsDirectory, - format: "modules", - moduleRoot: functionsDirectory, - }, - resolve(outdir), - { - inject: [routesModule], - entryName: "index", - minify, - sourcemap, - watch, - legacyNodeCompat, - // We don't currently have a mechanism for Plugins 'requiring' a specific compat date/flag, - // but if someone wants to publish a Plugin which does require this new `nodejs_compat` flag - // and they document that on their README.md, we should let them. 
- nodejsCompat: true, - define: {}, - doBindings: [], // Pages functions don't support internal Durable Objects - plugins: [ - buildNotifierPlugin(onEnd), - { - name: "Assets", - setup(pluginBuild) { - pluginBuild.onResolve({ filter: /^assets:/ }, async (args) => { - const directory = resolve( - args.resolveDir, - args.path.slice("assets:".length) - ); + const entry: Entry = { + file: resolve(getBasePath(), "templates/pages-template-plugin.ts"), + directory: functionsDirectory, + format: "modules", + moduleRoot: functionsDirectory, + }; + const moduleCollector = createModuleCollector({ + entry, + findAdditionalModules: false, + }); + return bundleWorker(entry, resolve(outdir), { + bundle: true, + additionalModules: [], + moduleCollector, + inject: [routesModule], + entryName: "index", + minify, + sourcemap, + watch, + legacyNodeCompat, + // We don't currently have a mechanism for Plugins 'requiring' a specific compat date/flag, + // but if someone wants to publish a Plugin which does require this new `nodejs_compat` flag + // and they document that on their README.md, we should let them. + nodejsCompat: true, + define: {}, + doBindings: [], // Pages functions don't support internal Durable Objects + plugins: [ + buildNotifierPlugin(onEnd), + { + name: "Assets", + setup(pluginBuild) { + pluginBuild.onResolve({ filter: /^assets:/ }, async (args) => { + const directory = resolve( + args.resolveDir, + args.path.slice("assets:".length) + ); - const exists = await access(directory) - .then(() => true) - .catch(() => false); + const exists = await access(directory) + .then(() => true) + .catch(() => false); - const isDirectory = - exists && (await lstat(directory)).isDirectory(); + const isDirectory = + exists && (await lstat(directory)).isDirectory(); - if (!isDirectory) { - return { - errors: [ - { - text: `'${directory}' does not exist or is not a directory.`, - }, - ], - }; - } + if (!isDirectory) { + return { + errors: [ + { + text: `'${directory}' does not exist or is not a directory.`, + }, + ], + }; + } - const path = `assets:./${relative(outdir, directory)}`; + const path = `assets:./${relative(outdir, directory)}`; - return { path, external: true, namespace: "assets" }; - }); - }, + return { path, external: true, namespace: "assets" }; + }); }, - // TODO: Replace this with a proper outdir solution for Plugins - // But for now, let's just mark all wasm/bin files as external - { - name: "Mark externals", - setup(pluginBuild) { - pluginBuild.onResolve( - { filter: /.*\.(wasm|bin)$/ }, - async (args) => { - return { - external: true, - path: `./${relative( - outdir, - resolve(args.resolveDir, args.path) - )}`, - }; - } - ); - }, + }, + // TODO: Replace this with a proper outdir solution for Plugins + // But for now, let's just mark all wasm/bin files as external + { + name: "Mark externals", + setup(pluginBuild) { + pluginBuild.onResolve({ filter: /.*\.(wasm|bin)$/ }, async (args) => { + return { + external: true, + path: `./${relative( + outdir, + resolve(args.resolveDir, args.path) + )}`, + }; + }); }, - ], - serveAssetsFromWorker: false, - disableModuleCollection: false, - rules: [], - checkFetch: local, - targetConsumer: local ? "dev" : "deploy", - forPages: true, - local, - } - ); + }, + ], + serveAssetsFromWorker: false, + checkFetch: local, + targetConsumer: local ? 
"dev" : "deploy", + forPages: true, + local, + }); } diff --git a/packages/wrangler/src/pages/functions/buildWorker.ts b/packages/wrangler/src/pages/functions/buildWorker.ts index b86b1ba42935..77492747cc4c 100644 --- a/packages/wrangler/src/pages/functions/buildWorker.ts +++ b/packages/wrangler/src/pages/functions/buildWorker.ts @@ -3,12 +3,17 @@ import { join, resolve } from "node:path"; import { build as esBuild } from "esbuild"; import { nanoid } from "nanoid"; import { bundleWorker } from "../../deployment-bundle/bundle"; -import traverseModuleGraph from "../../deployment-bundle/traverse-module-graph"; +import { findAdditionalModules } from "../../deployment-bundle/find-additional-modules"; +import { + createModuleCollector, + noopModuleCollector, +} from "../../deployment-bundle/module-collection"; import { FatalError } from "../../errors"; import { logger } from "../../logger"; import { getBasePath } from "../../paths"; import { realTmpdir } from "../utils"; import type { BundleResult } from "../../deployment-bundle/bundle"; +import type { Entry } from "../../deployment-bundle/entry"; import type { CfModule } from "../../deployment-bundle/worker"; import type { Plugin } from "esbuild"; @@ -28,7 +33,7 @@ export type Options = { local: boolean; }; -export function buildWorker({ +export function buildWorkerFromFunctions({ routesModule, outfile = join(realTmpdir(), `./functionsWorker-${Math.random()}.js`), outdir, @@ -43,113 +48,116 @@ export function buildWorker({ functionsDirectory, local, }: Options) { - return bundleWorker( - { - file: resolve(getBasePath(), "templates/pages-template-worker.ts"), - directory: functionsDirectory, - format: "modules", - moduleRoot: functionsDirectory, + const entry: Entry = { + file: resolve(getBasePath(), "templates/pages-template-worker.ts"), + directory: functionsDirectory, + format: "modules", + moduleRoot: functionsDirectory, + }; + const moduleCollector = createModuleCollector({ + entry, + findAdditionalModules: false, + }); + + return bundleWorker(entry, outdir ? resolve(outdir) : resolve(outfile), { + bundle: true, + additionalModules: [], + moduleCollector, + inject: [routesModule], + ...(outdir ? { entryName: "index" } : {}), + minify, + sourcemap, + watch, + legacyNodeCompat, + nodejsCompat, + define: { + __FALLBACK_SERVICE__: JSON.stringify(fallbackService), }, - outdir ? resolve(outdir) : resolve(outfile), - { - inject: [routesModule], - ...(outdir ? 
{ entryName: "index" } : {}), - minify, - sourcemap, - watch, - legacyNodeCompat, - nodejsCompat, - define: { - __FALLBACK_SERVICE__: JSON.stringify(fallbackService), - }, - doBindings: [], // Pages functions don't support internal Durable Objects - plugins: [ - buildNotifierPlugin(onEnd), - { - name: "Assets", - setup(pluginBuild) { - const identifiers = new Map(); + doBindings: [], // Pages functions don't support internal Durable Objects + plugins: [ + buildNotifierPlugin(onEnd), + { + name: "Assets", + setup(pluginBuild) { + const identifiers = new Map(); - pluginBuild.onResolve({ filter: /^assets:/ }, async (args) => { - const directory = resolve( - args.resolveDir, - args.path.slice("assets:".length) - ); + pluginBuild.onResolve({ filter: /^assets:/ }, async (args) => { + const directory = resolve( + args.resolveDir, + args.path.slice("assets:".length) + ); - const exists = await access(directory) - .then(() => true) - .catch(() => false); + const exists = await access(directory) + .then(() => true) + .catch(() => false); - const isDirectory = - exists && (await lstat(directory)).isDirectory(); + const isDirectory = + exists && (await lstat(directory)).isDirectory(); - if (!isDirectory) { - return { - errors: [ - { - text: `'${directory}' does not exist or is not a directory.`, - }, - ], - }; - } + if (!isDirectory) { + return { + errors: [ + { + text: `'${directory}' does not exist or is not a directory.`, + }, + ], + }; + } - // TODO: Consider hashing the contents rather than using a unique identifier every time? - identifiers.set(directory, nanoid()); - if (!buildOutputDirectory) { - console.warn( - "You're attempting to import static assets as part of your Pages Functions, but have not specified a directory in which to put them. You must use 'wrangler pages dev ' rather than 'wrangler pages dev -- ' to import static assets in Functions." - ); - } - return { path: directory, namespace: "assets" }; - }); + // TODO: Consider hashing the contents rather than using a unique identifier every time? + identifiers.set(directory, nanoid()); + if (!buildOutputDirectory) { + console.warn( + "You're attempting to import static assets as part of your Pages Functions, but have not specified a directory in which to put them. You must use 'wrangler pages dev ' rather than 'wrangler pages dev -- ' to import static assets in Functions." 
+ ); + } + return { path: directory, namespace: "assets" }; + }); - pluginBuild.onLoad( - { filter: /.*/, namespace: "assets" }, - async (args) => { - const identifier = identifiers.get(args.path); + pluginBuild.onLoad( + { filter: /.*/, namespace: "assets" }, + async (args) => { + const identifier = identifiers.get(args.path); - if (buildOutputDirectory) { - const staticAssetsOutputDirectory = join( - buildOutputDirectory, - "cdn-cgi", - "pages-plugins", - identifier as string - ); - await rm(staticAssetsOutputDirectory, { - force: true, - recursive: true, - }); - await cp(args.path, staticAssetsOutputDirectory, { - force: true, - recursive: true, - }); + if (buildOutputDirectory) { + const staticAssetsOutputDirectory = join( + buildOutputDirectory, + "cdn-cgi", + "pages-plugins", + identifier as string + ); + await rm(staticAssetsOutputDirectory, { + force: true, + recursive: true, + }); + await cp(args.path, staticAssetsOutputDirectory, { + force: true, + recursive: true, + }); - return { - // TODO: Watch args.path for changes and re-copy when updated - contents: `export const onRequest = ({ request, env, functionPath }) => { + return { + // TODO: Watch args.path for changes and re-copy when updated + contents: `export const onRequest = ({ request, env, functionPath }) => { const url = new URL(request.url) const relativePathname = \`/\${url.pathname.replace(functionPath, "") || ""}\`.replace(/^\\/\\//, '/'); url.pathname = '/cdn-cgi/pages-plugins/${identifier}' + relativePathname request = new Request(url.toString(), request) return env.ASSETS.fetch(request) }`, - }; - } + }; } - ); - }, + } + ); }, - ], - isOutfile: !outdir, - serveAssetsFromWorker: false, - disableModuleCollection: false, - rules: [], - checkFetch: local, - targetConsumer: local ? "dev" : "deploy", - forPages: true, - local, - } - ); + }, + ], + isOutfile: !outdir, + serveAssetsFromWorker: false, + checkFetch: local, + targetConsumer: local ? "dev" : "deploy", + forPages: true, + local, + }); } export type RawOptions = { @@ -193,58 +201,56 @@ export function buildRawWorker({ legacyNodeCompat, nodejsCompat, local, - additionalModules, + additionalModules = [], }: RawOptions) { - return bundleWorker( - { - file: workerScriptPath, - directory: resolve(directory), - format: "modules", - moduleRoot: resolve(directory), - }, - outdir ? resolve(outdir) : resolve(outfile), - { - bundle, - minify, - sourcemap, - watch, - legacyNodeCompat, - nodejsCompat, - define: {}, - doBindings: [], // Pages functions don't support internal Durable Objects - plugins: [ - ...plugins, - buildNotifierPlugin(onEnd), - ...(external - ? [ - // In some cases, we want to enable bundling in esbuild so that we can flatten a shim around the entrypoint, but we still don't want to actually bundle in all the chunks that a Worker references. - // This plugin allows us to mark those chunks as external so they are not inlined. - { - name: "external-fixer", - setup(pluginBuild) { - pluginBuild.onResolve({ filter: /.*/ }, async (args) => { - if ( - external.includes(resolve(args.resolveDir, args.path)) - ) { - return { path: args.path, external: true }; - } - }); - }, - } as Plugin, - ] - : []), - ], - isOutfile: !outdir, - serveAssetsFromWorker: false, - disableModuleCollection: external ? true : false, - rules: [], - checkFetch: local, - targetConsumer: local ? 
"dev" : "deploy", - forPages: true, - additionalModules, - local, - } - ); + const entry: Entry = { + file: workerScriptPath, + directory: resolve(directory), + format: "modules", + moduleRoot: resolve(directory), + }; + const moduleCollector = external + ? noopModuleCollector + : createModuleCollector({ entry, findAdditionalModules: false }); + + return bundleWorker(entry, outdir ? resolve(outdir) : resolve(outfile), { + bundle, + moduleCollector, + additionalModules, + minify, + sourcemap, + watch, + legacyNodeCompat, + nodejsCompat, + define: {}, + doBindings: [], // Pages functions don't support internal Durable Objects + plugins: [ + ...plugins, + buildNotifierPlugin(onEnd), + ...(external + ? [ + // In some cases, we want to enable bundling in esbuild so that we can flatten a shim around the entrypoint, but we still don't want to actually bundle in all the chunks that a Worker references. + // This plugin allows us to mark those chunks as external so they are not inlined. + { + name: "external-fixer", + setup(pluginBuild) { + pluginBuild.onResolve({ filter: /.*/ }, async (args) => { + if (external.includes(resolve(args.resolveDir, args.path))) { + return { path: args.path, external: true }; + } + }); + }, + } as Plugin, + ] + : []), + ], + isOutfile: !outdir, + serveAssetsFromWorker: false, + checkFetch: local, + targetConsumer: local ? "dev" : "deploy", + forPages: true, + local, + }); } export async function traverseAndBuildWorkerJSDirectory({ @@ -258,7 +264,7 @@ export async function traverseAndBuildWorkerJSDirectory({ }): Promise { const entrypoint = resolve(join(workerJSDirectory, "index.js")); - const traverseModuleGraphResult = await traverseModuleGraph( + const additionalModules = await findAdditionalModules( { file: entrypoint, directory: resolve(workerJSDirectory), @@ -277,9 +283,7 @@ export async function traverseAndBuildWorkerJSDirectory({ const bundleResult = await buildRawWorker({ workerScriptPath: entrypoint, bundle: true, - external: traverseModuleGraphResult.modules.map((m) => - join(workerJSDirectory, m.name) - ), + external: additionalModules.map((m) => join(workerJSDirectory, m.name)), outfile, directory: buildOutputDirectory, local: false, @@ -287,7 +291,7 @@ export async function traverseAndBuildWorkerJSDirectory({ watch: false, onEnd: () => {}, nodejsCompat, - additionalModules: traverseModuleGraphResult.modules, + additionalModules, }); return { @@ -297,7 +301,6 @@ export async function traverseAndBuildWorkerJSDirectory({ bundleType: bundleResult.bundleType, stop: bundleResult.stop, sourceMapPath: bundleResult.sourceMapPath, - moduleCollector: bundleResult.moduleCollector, }; } diff --git a/packages/wrangler/src/whoami.ts b/packages/wrangler/src/whoami.ts index 5b94a3c5947f..c3b166ba8554 100644 --- a/packages/wrangler/src/whoami.ts +++ b/packages/wrangler/src/whoami.ts @@ -11,7 +11,6 @@ export async function whoami() { "You are not authenticated. Please run `wrangler login`." ); } - if (user.email !== undefined) { logger.log( `👋 You are logged in with an ${ diff --git a/packages/wrangler/templates/modules-watch-stub.js b/packages/wrangler/templates/modules-watch-stub.js new file mode 100644 index 000000000000..f66d8d48fb72 --- /dev/null +++ b/packages/wrangler/templates/modules-watch-stub.js @@ -0,0 +1,4 @@ +// `esbuild` doesn't support returning `watch*` options from `onStart()` +// plugin callbacks. Instead, we define an empty virtual module that is +// imported by this injected file. Importing the module registers watchers. 
+import "wrangler:modules-watch"; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c2e1458dfa3b..f860181730e6 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -77,6 +77,21 @@ importers: specifier: ^1.10.14 version: 1.10.14 + fixtures/additional-modules: + devDependencies: + '@cloudflare/workers-tsconfig': + specifier: workspace:* + version: link:../../packages/workers-tsconfig + '@cloudflare/workers-types': + specifier: ^4.20230724.0 + version: 4.20230821.0 + undici: + specifier: ^5.9.1 + version: 5.23.0 + wrangler: + specifier: workspace:* + version: link:../../packages/wrangler + fixtures/d1-worker-app: devDependencies: wrangler: @@ -393,7 +408,11 @@ importers: specifier: workspace:* version: link:../../packages/wrangler - fixtures/shared: {} + fixtures/shared: + devDependencies: + wrangler: + specifier: workspace:* + version: link:../../packages/wrangler fixtures/sites-app: dependencies:
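Two hedged sketches follow; neither is part of the diff above, and every name in them is illustrative rather than taken from Wrangler's internals. First, the `modules-watch-stub.js` template added above works because esbuild allows `onLoad()` results (unlike `onStart()` results) to carry `watchFiles`/`watchDirs`; a plugin can therefore back the `wrangler:modules-watch` virtual module that the stub imports and register watchers each time that import is resolved. A minimal sketch of that wiring, assuming hypothetical plugin, namespace, and file names:

```ts
// Sketch only: not Wrangler's actual plugin. Shows how an esbuild plugin can
// back a virtual module such as `wrangler:modules-watch` and use its load step
// to register watchers, since `onStart()` results cannot carry `watch*`
// options but `onLoad()` results can.
import { context, type Plugin } from "esbuild";

function modulesWatchPlugin(watchFiles: string[]): Plugin {
	return {
		name: "modules-watch", // illustrative name
		setup(pluginBuild) {
			// Claim the virtual specifier so esbuild never looks for it on disk.
			pluginBuild.onResolve({ filter: /^wrangler:modules-watch$/ }, (args) => ({
				path: args.path,
				namespace: "modules-watch",
			}));
			// Serve an empty module, but tell esbuild which real files to watch.
			pluginBuild.onLoad({ filter: /.*/, namespace: "modules-watch" }, () => ({
				contents: "",
				loader: "js",
				watchFiles,
			}));
		},
	};
}

// Injecting the stub makes every build import the virtual module, so the
// watchers above are (re)registered on each rebuild.
const ctx = await context({
	entryPoints: ["src/index.ts"],
	bundle: true,
	outfile: "dist/index.js",
	inject: ["templates/modules-watch-stub.js"],
	plugins: [modulesWatchPlugin(["src/text.txt", "src/dynamic.js"])],
});
await ctx.watch();
```

Second, the lockfile entries above wire the new `fixtures/additional-modules` package (with `wrangler` and `undici` as dev dependencies) into the workspace; tests for such a fixture would typically drive the Worker through Wrangler's `unstable_dev` API. A hypothetical sketch, with the script path and route names assumed rather than copied from the fixture:

```ts
// Hypothetical test sketch (not the fixture's actual test file). It drives the
// Worker with wrangler's `unstable_dev` API; the script path and route names
// below are assumptions.
import { unstable_dev } from "wrangler";
import type { UnstableDevWorker } from "wrangler";
import { afterAll, beforeAll, expect, it } from "vitest";

let worker: UnstableDevWorker;

beforeAll(async () => {
	worker = await unstable_dev("src/index.ts", {
		experimental: { disableExperimentalWarning: true },
	});
});

afterAll(async () => {
	await worker.stop();
});

it("serves routes backed by bundled and additional modules", async () => {
	// A route whose dependency is bundled into the entry point.
	expect((await worker.fetch("/dep")).status).toBe(200);
	// A route that lazily imports a module; it would fail at runtime if that
	// module were not uploaded alongside the Worker.
	expect((await worker.fetch("/dynamic")).status).toBe(200);
});
```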