Mirror of https://github.com/microsoft/playwright.git, synced 2025-01-05 19:04:43 +03:00
fix(esmLoader): do not serialize the whole compilation cache after each import (#29464)
Instead, send only the newly created cache entry. References #29418.
commit f605a5009b
parent b947639d3a
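
The gist of the change: previously, every import() flushed a serialization of the entire accumulated compilation cache over the loader transport; now only the entry produced by that particular import is sent. A minimal sketch of the difference, using simplified stand-in types (CacheEntry and SerializedCache below are illustrative, not Playwright's actual types):

// Sketch only: illustrates delta vs. full-cache serialization.
// `CacheEntry` and `SerializedCache` are hypothetical stand-ins.
type CacheEntry = { codePath: string; sourceMapPath: string };
type SerializedCache = { memoryCache: [string, CacheEntry][] };

const memoryCache = new Map<string, CacheEntry>();

// Before: O(n) payload per import - reserializes everything seen so far.
function serializeWholeCache(): SerializedCache {
  return { memoryCache: [...memoryCache.entries()] };
}

// After: O(1) payload per import - only the entry that was just added.
function addAndSerializeOne(filename: string, entry: CacheEntry): SerializedCache {
  memoryCache.set(filename, entry);
  return { memoryCache: [[filename, entry]] };
}
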
@@ -60,6 +60,9 @@ export async function stopCollectingFileDeps(file: string) {
 export async function incorporateCompilationCache() {
   if (!loaderChannel)
     return;
+  // This is needed to gather dependency information from the esm loader
+  // that is populated from the resolve hook. We do not need to push
+  // this information proactively during load, but gather it at the end.
   const result = await loaderChannel.send('getCompilationCache', {});
   addToCompilationCache(result.cache);
 }

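
The added comment describes a pull model for dependency data: rather than the loader thread pushing on every load, the host asks it once, after loading finishes. A rough sketch of that round-trip, assuming a hypothetical LoaderChannel interface (the real loaderChannel is internal to Playwright):

// Hypothetical channel shape for illustration.
interface LoaderChannel {
  send(method: 'getCompilationCache', params: {}): Promise<{ cache: any }>;
}

async function gatherAtTheEnd(channel: LoaderChannel | undefined) {
  if (!channel)
    return;
  // One round-trip at the end replaces many pushes during load.
  const { cache } = await channel.send('getCompilationCache', {});
  // addToCompilationCache(cache) would merge it into the host-side cache.
}
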
@@ -92,12 +92,24 @@ export function installSourceMapSupportIfNeeded() {
   });
 }
 
-function _innerAddToCompilationCache(filename: string, entry: MemoryCache) {
+function _innerAddToCompilationCacheAndSerialize(filename: string, entry: MemoryCache) {
   sourceMaps.set(entry.moduleUrl || filename, entry.sourceMapPath);
   memoryCache.set(filename, entry);
+  return {
+    sourceMaps: [[entry.moduleUrl || filename, entry.sourceMapPath]],
+    memoryCache: [[filename, entry]],
+    fileDependencies: [],
+    externalDependencies: [],
+  };
 }
 
-export function getFromCompilationCache(filename: string, hash: string, moduleUrl?: string): { cachedCode?: string, addToCache?: (code: string, map: any | undefined | null, data: Map<string, any>) => void } {
+type CompilationCacheLookupResult = {
+  serializedCache?: any;
+  cachedCode?: string;
+  addToCache?: (code: string, map: any | undefined | null, data: Map<string, any>) => { serializedCache?: any };
+};
+
+export function getFromCompilationCache(filename: string, hash: string, moduleUrl?: string): CompilationCacheLookupResult {
   // First check the memory cache by filename, this cache will always work in the worker,
   // because we just compiled this file in the loader.
   const cache = memoryCache.get(filename);

@@ -116,22 +128,23 @@ export function getFromCompilationCache(filename: string, hash: string, moduleUr
   const dataPath = cachePath + '.data';
   try {
     const cachedCode = fs.readFileSync(codePath, 'utf8');
-    _innerAddToCompilationCache(filename, { codePath, sourceMapPath, dataPath, moduleUrl });
-    return { cachedCode };
+    const serializedCache = _innerAddToCompilationCacheAndSerialize(filename, { codePath, sourceMapPath, dataPath, moduleUrl });
+    return { cachedCode, serializedCache };
   } catch {
   }
 
   return {
     addToCache: (code: string, map: any | undefined | null, data: Map<string, any>) => {
       if (isWorkerProcess())
-        return;
+        return {};
       fs.mkdirSync(path.dirname(cachePath), { recursive: true });
       if (map)
         fs.writeFileSync(sourceMapPath, JSON.stringify(map), 'utf8');
       if (data.size)
         fs.writeFileSync(dataPath, JSON.stringify(Object.fromEntries(data.entries()), undefined, 2), 'utf8');
       fs.writeFileSync(codePath, code, 'utf8');
-      _innerAddToCompilationCache(filename, { codePath, sourceMapPath, dataPath, moduleUrl });
+      const serializedCache = _innerAddToCompilationCacheAndSerialize(filename, { codePath, sourceMapPath, dataPath, moduleUrl });
+      return { serializedCache };
     }
   };
 }

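
To make the new contract concrete, here is a sketch of a consumer of CompilationCacheLookupResult. compile and consume are hypothetical helpers; the real call site is transformHook, changed further below:

// Sketch of a caller, mirroring the new CompilationCacheLookupResult contract.
declare function getFromCompilationCache(filename: string, hash: string, moduleUrl?: string): {
  cachedCode?: string;
  serializedCache?: any;
  addToCache?: (code: string, map: any, data: Map<string, any>) => { serializedCache?: any };
};
declare function compile(filename: string): { code: string; map: any };
declare function consume(code: string, serializedCache?: any): void;

function lookupOrCompile(filename: string, hash: string, moduleUrl?: string) {
  const { cachedCode, addToCache, serializedCache } = getFromCompilationCache(filename, hash, moduleUrl);
  if (cachedCode !== undefined) {
    // Disk hit: the entry just added to the memory cache arrives pre-serialized.
    consume(cachedCode, serializedCache);
    return;
  }
  const { code, map } = compile(filename);
  // Miss: addToCache persists the entry and returns the one-entry delta;
  // in a worker process it returns {} because workers never flush the cache.
  const added = addToCache!(code, map, new Map());
  consume(code, added.serializedCache);
}
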
@@ -30,6 +30,8 @@ async function resolve(specifier: string, context: { parentURL?: string }, defau
     specifier = url.pathToFileURL(resolved).toString();
   }
   const result = await defaultResolve(specifier, context, defaultResolve);
+  // Note: we collect dependencies here that will be sent to the main thread
+  // (and optionally runner process) after the loading finishes.
   if (result?.url && result.url.startsWith('file://'))
     currentFileDepsCollector()?.add(url.fileURLToPath(result.url));
 
@@ -54,14 +56,15 @@ async function load(moduleUrl: string, context: { format?: string }, defaultLoad
     return defaultLoad(moduleUrl, context, defaultLoad);
 
   const code = fs.readFileSync(filename, 'utf-8');
-  const source = transformHook(code, filename, moduleUrl);
+  const transformed = transformHook(code, filename, moduleUrl);
 
-  // Flush the source maps to the main thread.
-  await transport?.send('pushToCompilationCache', { cache: serializeCompilationCache() });
+  // Flush the source maps to the main thread, so that errors during import() are source-mapped.
+  if (transformed.serializedCache)
+    await transport?.send('pushToCompilationCache', { cache: transformed.serializedCache });
 
   // Output format is always the same as input format, if it was unknown, we always report modules.
   // shortCircuit is required by Node >= 18.6 to designate no more loaders should be called.
-  return { format: context.format || 'module', source, shortCircuit: true };
+  return { format: context.format || 'module', source: transformed.code, shortCircuit: true };
 }
 
 let transport: PortTransport | undefined;

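
Putting the load hook together, a condensed sketch of the new control flow (transformHook and transport are declared here as stand-ins for Playwright's internals; only the transformable-file path is shown):

import fs from 'fs';
import url from 'url';

// Hypothetical stand-ins for the internals referenced by the hook.
declare function transformHook(code: string, filename: string, moduleUrl?: string): { code: string; serializedCache?: any };
declare const transport: { send(method: string, params: any): Promise<any> } | undefined;

// Condensed sketch of the load hook after this change (happy path only).
async function load(moduleUrl: string, context: { format?: string }, defaultLoad: unknown) {
  const filename = url.fileURLToPath(moduleUrl);
  const transformed = transformHook(fs.readFileSync(filename, 'utf-8'), filename, moduleUrl);
  // A delta exists only when this call added a new entry to the memory cache;
  // repeat hits come back without one, so nothing is re-sent per import.
  if (transformed.serializedCache)
    await transport?.send('pushToCompilationCache', { cache: transformed.serializedCache });
  return { format: context.format || 'module', source: transformed.code, shortCircuit: true };
}
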
@@ -163,7 +163,7 @@ export function setTransformData(pluginName: string, value: any) {
   transformData.set(pluginName, value);
 }
 
-export function transformHook(originalCode: string, filename: string, moduleUrl?: string): string {
+export function transformHook(originalCode: string, filename: string, moduleUrl?: string): { code: string, serializedCache?: any } {
   const isTypeScript = filename.endsWith('.ts') || filename.endsWith('.tsx') || filename.endsWith('.mts') || filename.endsWith('.cts');
   const hasPreprocessor =
       process.env.PW_TEST_SOURCE_TRANSFORM &&

@@ -172,9 +172,9 @@ export function transformHook(originalCode: string, filename: string, moduleUrl?
   const pluginsPrologue = _transformConfig.babelPlugins;
   const pluginsEpilogue = hasPreprocessor ? [[process.env.PW_TEST_SOURCE_TRANSFORM!]] as BabelPlugin[] : [];
   const hash = calculateHash(originalCode, filename, !!moduleUrl, pluginsPrologue, pluginsEpilogue);
-  const { cachedCode, addToCache } = getFromCompilationCache(filename, hash, moduleUrl);
+  const { cachedCode, addToCache, serializedCache } = getFromCompilationCache(filename, hash, moduleUrl);
   if (cachedCode !== undefined)
-    return cachedCode;
+    return { code: cachedCode, serializedCache };
 
   // We don't use any browserslist data, but babel checks it anyway.
   // Silence the annoying warning.

@@ -183,9 +183,10 @@ export function transformHook(originalCode: string, filename: string, moduleUrl?
   const { babelTransform }: { babelTransform: BabelTransformFunction } = require('./babelBundle');
   transformData = new Map<string, any>();
   const { code, map } = babelTransform(originalCode, filename, isTypeScript, !!moduleUrl, pluginsPrologue, pluginsEpilogue);
-  if (code)
-    addToCache!(code, map, transformData);
-  return code || '';
+  if (!code)
+    return { code: '', serializedCache };
+  const added = addToCache!(code, map, transformData);
+  return { code, serializedCache: added.serializedCache };
 }
 
 function calculateHash(content: string, filePath: string, isModule: boolean, pluginsPrologue: BabelPlugin[], pluginsEpilogue: BabelPlugin[]): string {

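
Summarizing the contract change (an editorial summary, not code from the patch): transformHook used to return the transformed source as a plain string; now every exit path returns one shape so callers can forward the cache delta.

// transformHook before: (originalCode, filename, moduleUrl?) => string
// transformHook after:  (originalCode, filename, moduleUrl?) => TransformResult
type TransformResult = { code: string; serializedCache?: any };
// Exit paths, per the hunks above:
// - cache hit:              { code: cachedCode, serializedCache }  (delta only when the entry is new to this process)
// - babel produced no code: { code: '', serializedCache }
// - fresh compile:          { code, serializedCache: added.serializedCache }
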
@@ -239,7 +240,7 @@ function installTransform(): () => void {
   const revertPirates = pirates.addHook((code: string, filename: string) => {
     if (!shouldTransform(filename))
       return code;
-    return transformHook(code, filename);
+    return transformHook(code, filename).code;
   }, { exts: ['.ts', '.tsx', '.js', '.jsx', '.mjs'] });
 
   return () => {

@@ -157,6 +157,32 @@ test('should use source maps', async ({ runInlineTest }) => {
   expect(output).toContain('[foo] › a.test.ts:4:7 › check project name');
 });
 
+test('should use source maps when importing a file throws an error', async ({ runInlineTest }) => {
+  test.info().annotations.push({ type: 'issue', description: 'https://github.com/microsoft/playwright/issues/29418' });
+
+  const result = await runInlineTest({
+    'package.json': `{ "type": "module" }`,
+    'playwright.config.ts': `
+      export default {};
+    `,
+    'a.test.ts': `
+      import { test, expect } from '@playwright/test';
+
+      throw new Error('Oh my!');
+    `
+  });
+  expect(result.exitCode).toBe(1);
+  expect(result.output).toContain(`Error: Oh my!
+
+   at a.test.ts:4
+
+  2 | import { test, expect } from '@playwright/test';
+  3 |
+> 4 | throw new Error('Oh my!');
+    |       ^
+`);
+});
+
 test('should show the codeframe in errors', async ({ runInlineTest }) => {
   const result = await runInlineTest({
     'package.json': `{ "type": "module" }`,

@@ -1706,6 +1706,5 @@ test('TestSuite.project() should return owning project', async ({ runInlineTest,
 
   const { exitCode, output } = await mergeReports(test.info().outputPath('blob-report'), undefined, { additionalArgs: ['--config', 'merge.config.ts'] });
   expect(exitCode).toBe(0);
-  console.log(output);
   expect(output).toContain(`test project: my-project`);
 });