Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
147 changes: 145 additions & 2 deletions tools/egg-bundler/src/lib/Bundler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,16 @@ const debug = debuglog('egg/bundler/bundler');

const BUNDLE_MANIFEST_VERSION = 1;
const BUNDLE_MANIFEST_FILENAME = 'bundle-manifest.json';
const IMPORT_META_FALLBACK_FILENAME_EXPR =
'(() => { const entryArg = typeof process !== "undefined" && process.argv && process.argv[1] ? process.argv[1] : "worker.js"; if (/^(?:[A-Za-z]:[\\\\/]|\\\\\\\\|\\/)/.test(entryArg)) return entryArg; const cwd = typeof process !== "undefined" && process.cwd ? process.cwd() : "."; const raw = cwd + "/" + entryArg; const parts = []; for (const part of raw.replace(/\\\\/g, "/").split("/")) { if (!part || part === ".") continue; if (part === "..") parts.pop(); else parts.push(part); } return (raw.startsWith("/") ? "/" : "") + parts.join("/"); })()';
Copy link

Copilot AI May 1, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

IMPORT_META_FALLBACK_FILENAME_EXPR normalizes to POSIX separators by splitting on "/" after replacing backslashes, then rejoins with "/". On Windows this makes the computed filename/dirname differ from Node’s __filename/__dirname (which use backslashes), and the integration test expectations (path.join(...)) will also fail on Windows runners. Consider preserving the platform separator (e.g., detect cwd.includes('\\') and join with \\), or avoid manual joining by using a small require('node:path').resolve(process.cwd(), entryArg) expression in the emitted code.

Suggested change
'(() => { const entryArg = typeof process !== "undefined" && process.argv && process.argv[1] ? process.argv[1] : "worker.js"; if (/^(?:[A-Za-z]:[\\\\/]|\\\\\\\\|\\/)/.test(entryArg)) return entryArg; const cwd = typeof process !== "undefined" && process.cwd ? process.cwd() : "."; const raw = cwd + "/" + entryArg; const parts = []; for (const part of raw.replace(/\\\\/g, "/").split("/")) { if (!part || part === ".") continue; if (part === "..") parts.pop(); else parts.push(part); } return (raw.startsWith("/") ? "/" : "") + parts.join("/"); })()';
'(() => { const entryArg = typeof process !== "undefined" && process.argv && process.argv[1] ? process.argv[1] : "worker.js"; if (/^(?:[A-Za-z]:[\\\\/]|\\\\\\\\|\\/)/.test(entryArg)) return entryArg; const cwd = typeof process !== "undefined" && process.cwd ? process.cwd() : "."; return require("node:path").resolve(cwd, entryArg); })()';

Copilot uses AI. Check for mistakes.
const IMPORT_META_FILENAME_EXPR = `(typeof __filename === "string" ? __filename : ${IMPORT_META_FALLBACK_FILENAME_EXPR})`;
const IMPORT_META_URL_EXPR = `(() => { const u = new URL("file:///"); u.pathname = ${IMPORT_META_FILENAME_EXPR}.replace(/\\\\/g, "/"); return u.href; })()`;
const THROWING_IMPORT_META_URL =
/\(\(\)\s*=>\s*\{\s*throw\s+new\s+Error\(\s*['"][^'"]*import\.meta\.url[^'"]*['"]\s*\)\s*;?\s*\}\)\s*\(\)/g;
const TURBOPACK_IMPORT_META_OBJECT =
/\b(var|let|const)\s+([A-Za-z_$][\w$]*import\$2e\$meta__[A-Za-z0-9_$]*)\s*=\s*\{\s*get\s+url\s*\(\)\s*\{[\s\S]*?\}\s*\};?/g;
const LINE_SOURCE_MAP_URL = /(?:\r?\n)?\/\/# sourceMappingURL=([^\r\n]*)\s*$/;
const BLOCK_SOURCE_MAP_URL = /(?:\r?\n)?\/\*# sourceMappingURL=([\s\S]*?)\*\/\s*$/;

interface BundleManifest {
readonly version: number;
Expand Down Expand Up @@ -90,6 +100,15 @@ export class Bundler {
const packResult = await wrapStep('pack build', () => packRunner.run());
debug('pack produced %d files', packResult.files.length);

const patchResult = await wrapStep('patch import.meta output', () =>
this.#patchImportMetaOutput(absOutputDir, packResult.files),
);
debug(
'patched %d import.meta output occurrences and removed %d sourcemaps',
patchResult.patchCount,
patchResult.deletedMapCount,
);

// Merge project name into output package.json so the framework's
// getAppname() finds it (it reads baseDir/package.json).
const outputPkgPath = path.join(absOutputDir, 'package.json');
Expand All @@ -113,14 +132,14 @@ export class Bundler {
framework,
entries: [{ name: 'worker', source: entries.workerEntry }],
externals: Object.keys(externalsMap).sort((a, b) => a.localeCompare(b)),
chunks: packResult.files,
chunks: patchResult.outputFiles,
};
await wrapStep('write bundle-manifest', () =>
fs.writeFile(manifestPathAbs, JSON.stringify(bundleManifest, null, 2)),
);

// Re-enumerate files so bundle-manifest.json is included in the result.
const finalRelFiles = new Set<string>(packResult.files);
const finalRelFiles = new Set<string>(patchResult.outputFiles);
finalRelFiles.add(BUNDLE_MANIFEST_FILENAME);
const files = Array.from(finalRelFiles)
.map((rel) => path.join(absOutputDir, rel))
Expand All @@ -132,4 +151,128 @@ export class Bundler {
manifestPath: manifestPathAbs,
};
}

/**
 * Rewrites Turbopack's broken `import.meta` output in all emitted .js files.
 *
 * Two patterns are patched: the synthesized `…import$2e$meta__` object (its
 * declaration is replaced via #renderImportMetaObject) and the bare throwing
 * `import.meta.url` IIFE (replaced with a working URL expression). Files that
 * were patched get their trailing sourceMappingURL comment stripped and their
 * now-stale .map files deleted.
 *
 * @param outputDir absolute output directory the relative paths are joined to
 * @param inputFiles relative output paths reported by the pack step
 * @returns patch/deletion counts plus the surviving relative file list
 *          (sorted; deleted sourcemaps removed) for the bundle manifest
 * @throws if any reported path fails #sanitizeOutputRelativePath validation
 */
async #patchImportMetaOutput(
outputDir: string,
inputFiles: readonly string[],
): Promise<{ patchCount: number; deletedMapCount: number; outputFiles: readonly string[] }> {
let patchCount = 0;
let deletedMapCount = 0;
// Validate every path up front and sort for deterministic manifest output.
const files = inputFiles.map((rel) => this.#sanitizeOutputRelativePath(rel)).sort((a, b) => a.localeCompare(b));
const deletedFiles = new Set<string>();

for (const rel of files) {
if (!rel.endsWith('.js')) continue;

const filepath = path.join(outputDir, rel);
const content = await fs.readFile(filepath, 'utf8');

// First pass: replace the Turbopack import.meta object declarations,
// counting replacements via the replacer callback.
let metaMatches = 0;
let patched = content.replace(
TURBOPACK_IMPORT_META_OBJECT,
(_match, declarationKind: string, metaName: string) => {
metaMatches++;
return this.#renderImportMetaObject(declarationKind, metaName);
},
);

// Second pass: count remaining throwing import.meta.url IIFEs (match is
// taken before replace so the count reflects what gets substituted).
const urlMatches = patched.match(THROWING_IMPORT_META_URL);
patched = patched.replace(THROWING_IMPORT_META_URL, IMPORT_META_URL_EXPR);

const patchesForFile = (urlMatches?.length ?? 0) + metaMatches;
// Untouched files are left byte-identical (sourcemaps stay valid).
if (patchesForFile === 0) continue;

// The patched content no longer matches its sourcemap: drop the
// sourceMappingURL comment and delete the stale map files.
const stripped = this.#stripSourceMappingUrl(patched);
await fs.writeFile(filepath, stripped);

patchCount += patchesForFile;
// Note: stale maps are resolved from the ORIGINAL content, which still
// carried the sourceMappingURL comment.
const staleMaps = await this.#deleteStaleSourceMaps(outputDir, filepath, content);
deletedMapCount += staleMaps.deletedCount;
for (const deleted of staleMaps.deletedFiles) deletedFiles.add(deleted);
debug('patched %d import.meta output occurrences in %s', patchesForFile, rel);
}

// Deleted sourcemaps must not appear in the manifest's chunk list.
const outputFiles = files.filter((rel) => !deletedFiles.has(rel));
return { patchCount, deletedMapCount, outputFiles };
}

/**
 * Renders the replacement declaration for Turbopack's import.meta object.
 *
 * The returned source re-declares the object (same kind and name) with
 * working `url`/`dirname`/`filename` getters computed once at evaluation
 * time: filename prefers `__filename` (falling back to the argv-based
 * expression), dirname prefers `__dirname` (falling back to stripping the
 * last path segment), and url is a file:// URL with backslashes normalized.
 *
 * @param declarationKind captured `var`/`let`/`const` keyword
 * @param metaName captured identifier of the Turbopack import.meta object
 * @returns emitted JavaScript source for the replacement declaration
 */
#renderImportMetaObject(declarationKind: string, metaName: string): string {
return `${declarationKind} ${metaName} = (() => {
const filename = ${IMPORT_META_FILENAME_EXPR};
const dirname = typeof __dirname === "string" ? __dirname : filename.replace(/[\\\\/][^\\\\/]*$/, "");
const url = (() => { const u = new URL("file:///"); u.pathname = filename.replace(/\\\\/g, "/"); return u.href; })();
return {
get url () {
return url;
},
get dirname () {
return dirname;
},
get filename () {
return filename;
}
};
})();`;
}

/**
 * Removes a trailing sourceMappingURL marker from patched chunk content.
 * Handles both the `//# sourceMappingURL=...` line-comment form and the
 * `/*# sourceMappingURL=...` block-comment form.
 */
#stripSourceMappingUrl(content: string): string {
const withoutLineComment = content.replace(LINE_SOURCE_MAP_URL, '');
const withoutBlockComment = withoutLineComment.replace(BLOCK_SOURCE_MAP_URL, '');
return withoutBlockComment;
}

/**
 * Deletes sourcemap files that became stale after a chunk was patched.
 *
 * Candidates are the implicit sibling `<file>.js.map` plus, when present, the
 * file's explicit sourceMappingURL target. Only targets that end with `.map`
 * are considered (a malformed or unexpected sourceMappingURL must never cause
 * deletion of a non-sourcemap bundle artifact), and every candidate is
 * re-checked to lie inside the output directory before unlinking. Missing
 * files (ENOENT) are ignored; any other unlink failure propagates.
 *
 * @param outputDir absolute bundle output directory (deletion boundary)
 * @param filepath absolute path of the .js file that was patched
 * @param originalContent the file's pre-patch content (still carries the
 *        sourceMappingURL comment)
 * @returns deletion count and the deleted paths relative to outputDir
 *          (forward-slash form, validated)
 */
async #deleteStaleSourceMaps(
outputDir: string,
filepath: string,
originalContent: string,
): Promise<{ deletedCount: number; deletedFiles: readonly string[] }> {
const mapPaths = new Set<string>([`${filepath}.map`]);
const sourceMapUrl = this.#extractSourceMappingUrl(originalContent);
// Only honor an explicit sourceMappingURL when it actually names a *.map
// file inside the output dir; anything else (data: URLs, arbitrary files)
// is ignored rather than deleted.
if (sourceMapUrl && !sourceMapUrl.startsWith('data:') && sourceMapUrl.endsWith('.map')) {
const resolved = path.resolve(path.dirname(filepath), sourceMapUrl);
if (this.#isInsideDir(outputDir, resolved)) mapPaths.add(resolved);
}

let deletedCount = 0;
const deletedFiles: string[] = [];
for (const mapPath of mapPaths) {
// Defense in depth: never unlink outside the output directory.
if (!this.#isInsideDir(outputDir, mapPath)) continue;
try {
await fs.unlink(mapPath);
deletedCount++;
deletedFiles.push(
this.#sanitizeOutputRelativePath(path.relative(outputDir, mapPath).split(path.sep).join('/')),
);
} catch (err) {
// A missing map simply means there is nothing stale to remove.
if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err;
}
}
return { deletedCount, deletedFiles };
}

/**
 * Extracts the trailing sourceMappingURL value from chunk content, trying the
 * line-comment form first and the block-comment form second.
 * Returns the trimmed URL, or undefined when neither form is present.
 */
#extractSourceMappingUrl(content: string): string | undefined {
for (const pattern of [LINE_SOURCE_MAP_URL, BLOCK_SOURCE_MAP_URL]) {
const captured = content.match(pattern)?.[1];
if (captured) return captured.trim();
}
return undefined;
}

/**
 * Validates a pack-reported output path and normalizes it to forward-slash
 * relative form, so that later `path.join(outputDir, rel)` cannot escape the
 * output directory.
 *
 * Rejected: empty paths, absolute paths (POSIX *and* Windows forms — a
 * drive-absolute "C:/x" or UNC path would make path.join discard outputDir on
 * Windows even though path.posix.isAbsolute does not flag it), empty/"."/".."
 * segments, NUL bytes, and line/paragraph separators.
 *
 * @param relativeName path as reported by the pack step (platform separators)
 * @returns the forward-slash normalized relative path
 * @throws Error when the path is unsafe
 */
#sanitizeOutputRelativePath(relativeName: string): string {
const normalized = relativeName.split(path.sep).join('/');
const segments = normalized.split('/');
if (
!normalized ||
path.posix.isAbsolute(normalized) ||
// Windows drive-absolute / UNC forms are absolute too, even on POSIX hosts.
path.win32.isAbsolute(normalized) ||
segments.some((segment) => !segment || segment === '.' || segment === '..') ||
normalized.includes('\0') ||
/[\r\n\u2028\u2029]/u.test(normalized)
) {
throw new Error(`Unsafe bundle output path: ${relativeName}`);
}
return normalized;
}

/**
 * Reports whether `target` lies within `dir` (or equals it): the relative
 * path from dir to target must neither climb out via ".." nor be absolute
 * (which path.relative produces for paths on a different root/drive).
 */
#isInsideDir(dir: string, target: string): boolean {
const relativePath = path.relative(dir, target);
if (relativePath === '') return true;
if (path.isAbsolute(relativePath)) return false;
return !relativePath.startsWith('..');
}
}
111 changes: 110 additions & 1 deletion tools/egg-bundler/test/integration.test.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import fs from 'node:fs/promises';
import os from 'node:os';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { fileURLToPath, pathToFileURL } from 'node:url';
import { runInNewContext } from 'node:vm';

import { afterEach, beforeEach, describe, expect, it } from 'vitest';

Expand Down Expand Up @@ -183,6 +184,114 @@ describe('bundle() integration — minimal-app (Phase 1: mocked @utoo/pack)', ()
expect(bm.externals).toContain('synthetic-force-ext');
});

it('patches nested Turbopack import.meta chunks and removes stale sourcemaps', async () => {
const sourceMapToken = 'sourceMapping' + 'URL';
const throwingMeta = `var __TURBOPACK__import$2e$meta__ = {
get url () {
return (() => { throw new Error("could not convert import.meta.url to filepath"); })();
}
};
globalThis.__patchedMeta = {
url: __TURBOPACK__import$2e$meta__.url,
dirname: __TURBOPACK__import$2e$meta__.dirname,
filename: __TURBOPACK__import$2e$meta__.filename
};
//# ${sourceMapToken}=chunk #.js.map
`;
const urlOnlyMeta = `let __TURBOPACK__import$2e$meta__ = { get url () { return "file:///already-patched.js"; } };
globalThis.__patchedMeta = {
url: __TURBOPACK__import$2e$meta__.url,
dirname: __TURBOPACK__import$2e$meta__.dirname,
filename: __TURBOPACK__import$2e$meta__.filename
};
/*# ${sourceMapToken}=url-only.js.map */
`;

const buildFunc: BuildFunc = async () => {
await fs.writeFile(path.join(tmpOutput, 'worker.js'), '// mock worker entry\n');
await fs.mkdir(path.join(tmpOutput, 'chunks'), { recursive: true });
await fs.writeFile(path.join(tmpOutput, 'chunks/chunk #.js'), throwingMeta);
await fs.writeFile(path.join(tmpOutput, 'chunks/chunk #.js.map'), '{"version":3}');
await fs.writeFile(path.join(tmpOutput, 'chunks/url-only.js'), urlOnlyMeta);
Comment thread
killagu marked this conversation as resolved.
await fs.writeFile(path.join(tmpOutput, 'chunks/url-only.js.map'), '{"version":3}');
};

const result = await bundle({
baseDir: tmpApp,
outputDir: tmpOutput,
pack: { buildFunc },
});
const bm = JSON.parse(await fs.readFile(result.manifestPath, 'utf8')) as { chunks: string[] };

async function runPatchedChunk(
filepath: string,
options: { argv: string[]; filename?: string },
): Promise<{ url: string; dirname: string; filename: string }> {
interface SandboxProcess {
argv: string[];
cwd: () => string;
}
interface Sandbox {
URL: typeof URL;
process: SandboxProcess;
globalThis: Sandbox;
__dirname?: string;
__filename?: string;
__patchedMeta?: { url: string; dirname: string; filename: string };
}
const sandbox = {
URL,
process: { argv: options.argv, cwd: () => tmpOutput },
} as unknown as Sandbox;
if (options.filename) {
sandbox.__filename = options.filename;
sandbox.__dirname = path.dirname(options.filename);
}
sandbox.globalThis = sandbox;
runInNewContext(await fs.readFile(filepath, 'utf8'), sandbox);
return sandbox.__patchedMeta!;
}

function expectedFileUrl(filename: string): string {
return pathToFileURL(filename).href;
}

const nestedFilename = path.join(tmpOutput, 'chunks/chunk #.js');
const nestedMeta = await runPatchedChunk(nestedFilename, { argv: ['node', 'worker.js'], filename: nestedFilename });
expect(nestedMeta).toEqual({
url: expectedFileUrl(nestedFilename),
dirname: path.dirname(nestedFilename),
filename: nestedFilename,
});

const urlOnlyFilename = path.join(tmpOutput, 'chunks/url-only.js');
const urlOnlyPatched = await fs.readFile(urlOnlyFilename, 'utf8');
expect(urlOnlyPatched).not.toContain('already-patched.js');
const urlOnlyMetaResult = await runPatchedChunk(urlOnlyFilename, { argv: ['node'], filename: urlOnlyFilename });
expect(urlOnlyMetaResult).toEqual({
url: expectedFileUrl(urlOnlyFilename),
dirname: path.dirname(urlOnlyFilename),
filename: urlOnlyFilename,
});

const fallbackFilename = path.join(tmpOutput, 'worker.js');
const fallbackMetaResult = await runPatchedChunk(urlOnlyFilename, { argv: ['node', './worker.js'] });
expect(fallbackMetaResult).toEqual({
url: expectedFileUrl(fallbackFilename),
dirname: tmpOutput,
filename: fallbackFilename,
});

for (const name of ['chunks/chunk #.js', 'chunks/url-only.js']) {
const content = await fs.readFile(path.join(tmpOutput, name), 'utf8');
expect(content).not.toContain(sourceMapToken);
}
await expect(fs.stat(path.join(tmpOutput, 'chunks/chunk #.js.map'))).rejects.toMatchObject({ code: 'ENOENT' });
await expect(fs.stat(path.join(tmpOutput, 'chunks/url-only.js.map'))).rejects.toMatchObject({ code: 'ENOENT' });
expect(result.files).not.toEqual(expect.arrayContaining([expect.stringContaining('.js.map')]));
expect(bm.chunks).not.toEqual(expect.arrayContaining([expect.stringContaining('.js.map')]));
});

it('wraps a buildFunc failure under the "pack build" step with an identifiable prefix and preserves cause', async () => {
const original = new Error('synthetic pack failure');
const buildFunc: BuildFunc = async () => {
Expand Down
Loading