mirror of
https://github.com/microsoft/playwright.git
synced 2024-12-12 11:50:22 +03:00
devops: re-factor list-dependencies script to output per-browser results (#3194)
This patch: - refactors script to output per-browser package dependencies. This is to aid with per-browser docker setup and Playwright github action. - sorts package maps for both Ubuntu 18.04 and Ubuntu 20.04 alphabetically (and removes a stray dependency) References #2926
This commit is contained in:
parent
84a17f27eb
commit
487bc589b0
@ -6,47 +6,205 @@ const path = require('path');
|
||||
const {spawn} = require('child_process');
|
||||
const browserPaths = require('playwright/lib/install/browserPaths.js');
|
||||
|
||||
(async () => {
|
||||
const allBrowsersPath = browserPaths.browsersPath();
|
||||
const {stdout} = await runCommand('find', [allBrowsersPath, '-executable', '-type', 'f']);
|
||||
// lddPaths - files we want to run LDD against.
|
||||
const lddPaths = stdout.trim().split('\n').map(f => f.trim()).filter(filePath => !filePath.toLowerCase().endsWith('.sh'));
|
||||
// List of all shared libraries missing.
|
||||
const missingDeps = new Set();
|
||||
// Multimap: reverse-mapping from shared library to requiring file.
|
||||
const depsToLddPaths = new Map();
|
||||
await Promise.all(lddPaths.map(async lddPath => {
|
||||
const deps = await missingFileDependencies(lddPath);
|
||||
for (const dep of deps) {
|
||||
missingDeps.add(dep);
|
||||
let depsToLdd = depsToLddPaths.get(dep);
|
||||
if (!depsToLdd) {
|
||||
depsToLdd = new Set();
|
||||
depsToLddPaths.set(dep, depsToLdd);
|
||||
}
|
||||
depsToLdd.add(lddPath);
|
||||
}
|
||||
}));
|
||||
console.log(`==== MISSING DEPENDENCIES: ${missingDeps.size} ====`);
|
||||
console.log([...missingDeps].sort().join('\n'));
|
||||
// Promisified fs helpers used throughout this script.
// `.bind(fs)` keeps the original receiver, matching the callback API exactly.
const readdirAsync = util.promisify(fs.readdir.bind(fs));
const readFileAsync = util.promisify(fs.readFile.bind(fs));
|
||||
|
||||
console.log('{');
|
||||
for (const dep of missingDeps) {
|
||||
const packages = await findPackages(dep);
|
||||
if (packages.length === 0) {
|
||||
console.log(` // UNRESOLVED: ${dep} `);
|
||||
const depsToLdd = depsToLddPaths.get(dep);
|
||||
for (const filePath of depsToLdd)
|
||||
console.log(` // - required by ${filePath}`);
|
||||
const readline = require('readline');
|
||||
|
||||
// These libraries are accessed dynamically by browsers using `dlopen` system call and
// thus have to be installed in the system.
//
// Tip: to assess which libraries are getting opened dynamically, one can use `strace`:
//
//    strace -f -e trace=open,openat <program>
//
// Keyed by browser vendor (the part of the browser directory name before the dash).
const DL_OPEN_LIBRARIES = {
  chromium: [],
  firefox: [],
  webkit: [ 'libGLESv2.so.2' ],
};
|
||||
|
||||
// Main driver: for every installed browser build, find shared libraries that
// `ldd` reports as missing, map each library to an installable package and
// print per-browser package lists. Exits via the SUCCESS/FAILED banner.
(async () => {
  console.log('Working on:', await getDistributionName());
  console.log('Started at:', currentTime());

  const allBrowsersPath = browserPaths.browsersPath();
  // One descriptor per installed browser build directory.
  const browserDescriptors = (await readdirAsync(allBrowsersPath)).filter(dir => !dir.startsWith('.')).map(dir => ({
    // Full browser name, e.g. `webkit-1144`
    name: dir,
    // Full path to browser files
    path: path.join(allBrowsersPath, dir),
    // All files that we will try to inspect for missing dependencies.
    filePaths: [],
    // All libraries that are missing for the browser.
    missingLibraries: new Set(),
    // All packages required for the browser.
    requiredPackages: new Set(),
    // Libraries for which we didn't find a package.
    unresolvedLibraries: new Set(),
  }));

  // Collect all missing libraries for all browsers.
  const allMissingLibraries = new Set();
  for (const descriptor of browserDescriptors) {
    // Browser vendor, can be `webkit`, `firefox` or `chromium`.
    const vendor = descriptor.name.split('-')[0];
    // Seed with libraries the browser dlopen's at runtime — ldd cannot see those.
    for (const library of DL_OPEN_LIBRARIES[vendor]) {
      descriptor.missingLibraries.add(library);
      allMissingLibraries.add(library);
    }

    const {stdout} = await runCommand('find', [descriptor.path, '-type', 'f']);
    // Skip shell scripts — running ldd only makes sense for binaries/libraries.
    descriptor.filePaths = stdout.trim().split('\n').map(f => f.trim()).filter(filePath => !filePath.toLowerCase().endsWith('.sh'));
    await Promise.all(descriptor.filePaths.map(async filePath => {
      const missingLibraries = await missingFileDependencies(filePath);
      for (const library of missingLibraries) {
        descriptor.missingLibraries.add(library);
        allMissingLibraries.add(library);
      }
    }));
  }

  const libraryToPackage = new Map();
  const ambiguityLibraries = new Map();

  // Map missing libraries to packages that could be installed to fulfill the dependency.
  console.log(`Finding packages for ${allMissingLibraries.size} missing libraries...`);

  for (let i = 0, array = [...allMissingLibraries].sort(); i < allMissingLibraries.size; ++i) {
    const library = array[i];
    const packages = await findPackages(library);

    const progress = `${i + 1}/${allMissingLibraries.size}`;
    console.log(`${progress.padStart(7)}: ${library} => ${JSON.stringify(packages)}`);

    if (!packages.length) {
      const browsersWithMissingLibrary = browserDescriptors.filter(d => d.missingLibraries.has(library)).map(d => d.name).join(', ');
      const PADDING = ''.padStart(7) + ' ';
      console.log(PADDING + `ERROR: failed to resolve '${library}' required by ${browsersWithMissingLibrary}`);
    } else if (packages.length === 1) {
      // BUGFIX: this log line referenced an undefined `dep`; the loop variable is `library`.
      console.log(`  "${library}": "${packages[0]}",`);
      libraryToPackage.set(library, packages[0]);
    } else {
      // BUGFIX: same undefined-`dep` reference fixed here.
      console.log(`  "${library}": ${JSON.stringify(packages)},`);
      ambiguityLibraries.set(library, packages);
    }
  }

  console.log('');
  console.log(`Picking packages for ${ambiguityLibraries.size} libraries that have multiple package candidates`);
  // Pick packages to install to fulfill missing libraries.
  //
  // This is a 2-step process:
  // 1. Pick easy libraries by filtering out debug, test and dev packages.
  // 2. After that, pick packages that we already picked before.
  //
  // NOTE: locals are named `pkg` because `package` is a reserved word in
  // strict mode (and thus in ES modules).

  // Step 1: pick libraries that are easy to pick.
  const totalAmbiguityLibraries = ambiguityLibraries.size;
  for (const [library, packages] of ambiguityLibraries) {
    const pkg = pickPackage(library, packages);
    if (!pkg)
      continue;
    libraryToPackage.set(library, pkg);
    ambiguityLibraries.delete(library);
    const progress = `${totalAmbiguityLibraries - ambiguityLibraries.size}/${totalAmbiguityLibraries}`;
    console.log(`${progress.padStart(7)}: ${library} => ${pkg}`);
    console.log(''.padStart(9) + `(note) packages are ${JSON.stringify(packages)}`);
  }

  // 2nd pass - prefer packages that we already picked.
  const allUsedPackages = new Set(libraryToPackage.values());
  for (const [library, packages] of ambiguityLibraries) {
    const pkg = packages.find(candidate => allUsedPackages.has(candidate));
    if (!pkg)
      continue;
    libraryToPackage.set(library, pkg);
    ambiguityLibraries.delete(library);
    const progress = `${totalAmbiguityLibraries - ambiguityLibraries.size}/${totalAmbiguityLibraries}`;
    console.log(`${progress.padStart(7)}: ${library} => ${pkg}`);
    console.log(''.padStart(9) + `(note) packages are ${JSON.stringify(packages)}`);
  }

  // 3rd pass - prompt user to resolve remaining ambiguities interactively.
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  const promptAsync = (question) => new Promise(resolve => rl.question(question, resolve));

  for (const [library, packages] of ambiguityLibraries) {
    const question = [
      `Pick a package for '${library}':`,
      ...packages.map((candidate, index) => `  (${index + 1}) ${candidate}`),
      'Enter number: ',
    ].join('\n');

    const answer = await promptAsync(question);
    const index = parseInt(answer, 10) - 1;
    if (Number.isNaN(index) || (index < 0) || (index >= packages.length)) {
      console.error(`ERROR: unknown index "${answer}". Must be a number between 1 and ${packages.length}`);
      process.exit(1);
    }
    const pkg = packages[index];

    ambiguityLibraries.delete(library);
    libraryToPackage.set(library, pkg);
    console.log(answer);
    console.log(`- ${library} => ${pkg}`);
  }
  rl.close();

  // For each browser build a list of packages to install.
  for (const descriptor of browserDescriptors) {
    for (const library of descriptor.missingLibraries) {
      const pkg = libraryToPackage.get(library);
      if (pkg)
        descriptor.requiredPackages.add(pkg);
      else
        descriptor.unresolvedLibraries.add(library);
    }
  }

  // Formatting results.
  console.log('');
  console.log(`----- Library to package name mapping -----`);
  console.log('{');
  const sortedEntries = [...libraryToPackage.entries()].sort((a, b) => a[0].localeCompare(b[0]));
  for (const [library, pkg] of sortedEntries)
    console.log(`  "${library}": "${pkg}",`);
  console.log('}');

  // Packages and unresolved libraries for every browser.
  for (const descriptor of browserDescriptors) {
    console.log('');
    console.log(`======= ${descriptor.name}: required packages =======`);
    const requiredPackages = [...descriptor.requiredPackages].sort();
    console.log(JSON.stringify(requiredPackages, null, 2));
    console.log('');
    console.log(`------- ${descriptor.name}: unresolved libraries -------`);
    const unresolvedLibraries = [...descriptor.unresolvedLibraries].sort();
    console.log(JSON.stringify(unresolvedLibraries, null, 2));
  }

  const status = browserDescriptors.some(d => d.unresolvedLibraries.size) ? 'FAILED' : 'SUCCESS';
  console.log(`
====================
  ${status}
====================
`);
})();
|
||||
|
||||
/**
 * Heuristically picks a single package out of several candidates that all
 * provide `library`.
 *
 * @param {string} library - shared library file name, e.g. `libgtk-3.so.0`.
 * @param {string[]} packages - candidate package names (e.g. from `apt-file search`).
 * @returns {?string} the chosen package name, or null when the ambiguity
 *     cannot be resolved automatically.
 */
function pickPackage(library, packages) {
  // NOTE: locals are named `pkg`/`candidates` (not `package`) because
  // `package` is a reserved word in strict mode and ES modules. The original
  // parameter is also no longer mutated.
  // Step 1: try to filter out debug, test and dev packages.
  let candidates = packages.filter(pkg => !pkg.endsWith('-dbg') && !pkg.endsWith('-test') && !pkg.endsWith('-dev') && !pkg.endsWith('-mesa'));
  if (candidates.length === 1)
    return candidates[0];
  // Step 2: use library name to filter packages with the same name.
  const prefix = library.split(/[-.]/).shift().toLowerCase();
  candidates = candidates.filter(pkg => pkg.toLowerCase().startsWith(prefix));
  if (candidates.length === 1)
    return candidates[0];
  return null;
}
|
||||
|
||||
async function findPackages(libraryName) {
|
||||
const {stdout} = await runCommand('apt-file', ['search', libraryName]);
|
||||
if (!stdout.trim())
|
||||
@ -56,7 +214,9 @@ async function findPackages(libraryName) {
|
||||
}
|
||||
|
||||
async function fileDependencies(filePath) {
|
||||
const {stdout} = await lddAsync(filePath);
|
||||
const {stdout, code} = await lddAsync(filePath);
|
||||
if (code !== 0)
|
||||
return [];
|
||||
const deps = stdout.split('\n').map(line => {
|
||||
line = line.trim();
|
||||
const missing = line.includes('not found');
|
||||
@ -100,3 +260,27 @@ function runCommand(command, args, options = {}) {
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Returns the human-readable Linux distribution name (the PRETTY_NAME field
// of /etc/os-release), or '' when the field is absent.
async function getDistributionName() {
  const osReleaseText = await readFileAsync('/etc/os-release', 'utf8');
  const fields = new Map();
  // /etc/os-release is a list of KEY=VALUE lines; values may be double-quoted.
  for (const line of osReleaseText.split('\n')) {
    const [name, ...valueTokens] = line.split('=');
    let value = valueTokens.join('=').trim();
    // Strip surrounding double quotes, e.g. PRETTY_NAME="Ubuntu 20.04 LTS".
    if (value.startsWith('"') && value.endsWith('"'))
      value = value.substring(1, value.length - 1);
    // Skip blank/malformed lines that have no key.
    if (!name)
      continue;
    fields.set(name.toLowerCase(), value);
  }
  return fields.get('pretty_name') || '';
}
|
||||
|
||||
// Returns the current date formatted as e.g. "Jul 27, 2020" (en locale).
function currentTime() {
  const formatter = new Intl.DateTimeFormat('en', { year: 'numeric', month: 'short', day: '2-digit' });
  const parts = formatter.formatToParts(new Date());
  const part = (type) => parts.find(p => p.type === type).value;
  return `${part('month')} ${part('day')}, ${part('year')}`;
}
|
||||
|
||||
|
@ -16,4 +16,8 @@ mkdir /root/tmp && cd /root/tmp && npm init -y && npm i /root/hostfolder/playwri
|
||||
|
||||
cp /root/hostfolder/inside_docker/list_dependencies.js /root/tmp/list_dependencies.js
|
||||
|
||||
node list_dependencies.js | tee /root/hostfolder/RUN_RESULT
|
||||
FILENAME="RUN_RESULT"
|
||||
if [[ -n $1 ]]; then
|
||||
FILENAME=$1
|
||||
fi
|
||||
node list_dependencies.js | tee "/root/hostfolder/$FILENAME"
|
||||
|
@ -3,12 +3,12 @@ set -e
|
||||
set +x
|
||||
|
||||
if [[ ($1 == '--help') || ($1 == '-h') ]]; then
|
||||
echo "usage: $(basename $0) <image-name>"
|
||||
echo "usage: $(basename $0) <image-name> [<optional output filename>]"
|
||||
echo
|
||||
echo "List mapping between browser dependencies to package names and save results in RUN_RESULT file."
|
||||
echo "List mapping between browser dependencies to package names and save results in RUN_RESULT file or a custom file name."
|
||||
echo "Example:"
|
||||
echo ""
|
||||
echo " $(basename $0) ubuntu:bionic"
|
||||
echo " $(basename $0) ubuntu:bionic ubuntu-bionic-run-log"
|
||||
echo ""
|
||||
echo "NOTE: this requires Playwright dependencies to be installed with 'npm install'"
|
||||
echo " and Playwright itself being built with 'npm run build'"
|
||||
@ -31,5 +31,5 @@ cd "$(dirname "$0")"
|
||||
# We rely on `./playwright.tar.gz` to download browsers into the docker image.
|
||||
node ../../packages/build_package.js playwright ./playwright.tar.gz
|
||||
|
||||
docker run -v $PWD:/root/hostfolder --rm -it "$1" /root/hostfolder/inside_docker/process.sh
|
||||
docker run -v $PWD:/root/hostfolder --rm -it "$1" /root/hostfolder/inside_docker/process.sh "$2"
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user