uawdijnntqw1x1x1
IP : 216.73.216.110
Hostname : 6.87.74.97.host.secureserver.net
Kernel : Linux 6.87.74.97.host.secureserver.net 4.18.0-553.83.1.el8_10.x86_64 #1 SMP Mon Nov 10 04:22:44 EST 2025 x86_64
Disable Function : None :)
OS : Linux
PATH:
/
home
/
emeraadmin
/
www
/
node_modules
/
path-parse
/
..
/
debug
/
..
/
xmlbuilder
/
..
/
..
/
js
/
..
/
4d695
/
@npmcli.zip
/
/
PK]�\h�T�^^package-json/package.jsonnu�[���{ "_id": "@npmcli/package-json@5.1.1", "_inBundle": true, "_location": "/npm/@npmcli/package-json", "_phantomChildren": {}, "_requiredBy": [ "/npm", "/npm/@npmcli/arborist", "/npm/@npmcli/run-script", "/npm/init-package-json", "/npm/pacote" ], "author": { "name": "GitHub Inc." }, "bugs": { "url": "https://github.com/npm/package-json/issues" }, "dependencies": { "@npmcli/git": "^5.0.0", "glob": "^10.2.2", "hosted-git-info": "^7.0.0", "json-parse-even-better-errors": "^3.0.0", "normalize-package-data": "^6.0.0", "proc-log": "^4.0.0", "semver": "^7.5.3" }, "description": "Programmatic API to update package.json", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.22.0", "read-package-json": "^7.0.0", "read-package-json-fast": "^3.0.2", "tap": "^16.0.1" }, "engines": { "node": "^16.14.0 || >=18.0.0" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/package-json#readme", "keywords": [ "npm", "oss" ], "license": "ISC", "main": "lib/index.js", "name": "@npmcli/package-json", "repository": { "type": "git", "url": "git+https://github.com/npm/package-json.git" }, "scripts": { "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "postsnap": "npm run lintfix --", "posttest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap" }, "tap": { "nyc-arg": [ "--exclude", "tap-snapshots/**" ] }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.22.0", "publish": "true" }, "version": "5.1.1" } PK]�\,"a̘�"package-json/lib/update-scripts.jsnu�[���const updateScripts = ({ content, originalContent = {} }) => { const newScripts = content.scripts if (!newScripts) { return originalContent } // validate scripts content being appended const hasInvalidScripts = () => Object.entries(newScripts) .some(([key, value]) => typeof key !== 'string' || typeof value !== 'string') if (hasInvalidScripts()) { throw Object.assign( new TypeError( 'package.json scripts should be a key-value pair of strings.'), { code: 'ESCRIPTSINVALID' } ) } return { ...originalContent, scripts: { ...newScripts, }, } } module.exports = updateScripts PK]�\�j!rr%package-json/lib/update-workspaces.jsnu�[���const updateWorkspaces = ({ content, originalContent = {} }) => { const newWorkspaces = content.workspaces if (!newWorkspaces) { return originalContent } // validate workspaces content being appended const hasInvalidWorkspaces = () => newWorkspaces.some(w => !(typeof w === 'string')) if (!newWorkspaces.length || hasInvalidWorkspaces()) { throw Object.assign( new TypeError('workspaces should be an array of strings.'), { code: 'EWORKSPACESINVALID' } ) } return { ...originalContent, workspaces: [ ...newWorkspaces, ], } } module.exports = updateWorkspaces PK]�\#Ͷ��'package-json/lib/update-dependencies.jsnu�[���const depTypes = new Set([ 'dependencies', 'optionalDependencies', 'devDependencies', 'peerDependencies', ]) // sort alphabetically all types of deps for a given package const orderDeps = (content) => { for (const type of depTypes) { if (content && content[type]) { content[type] = Object.keys(content[type]) .sort((a, b) => a.localeCompare(b, 'en')) .reduce((res, key) => { res[key] = content[type][key] return res }, {}) } } return content } const updateDependencies = ({ content, originalContent }) => { const pkg = orderDeps({ ...content, }) // optionalDependencies don't need to be repeated in two places if 
(pkg.dependencies) { if (pkg.optionalDependencies) { for (const name of Object.keys(pkg.optionalDependencies)) { delete pkg.dependencies[name] } } } const result = { ...originalContent } // loop through all types of dependencies and update package json pkg for (const type of depTypes) { if (pkg[type]) { result[type] = pkg[type] } // prune empty type props from resulting object const emptyDepType = pkg[type] && typeof pkg === 'object' && Object.keys(pkg[type]).length === 0 if (emptyDepType) { delete result[type] } } // if original package.json had dep in peerDeps AND deps, preserve that. const { dependencies: origProd, peerDependencies: origPeer } = originalContent || {} const { peerDependencies: newPeer } = result if (origProd && origPeer && newPeer) { // we have original prod/peer deps, and new peer deps // copy over any that were in both in the original for (const name of Object.keys(origPeer)) { if (origProd[name] !== undefined && newPeer[name] !== undefined) { result.dependencies = result.dependencies || {} result.dependencies[name] = newPeer[name] } } } return result } updateDependencies.knownKeys = depTypes module.exports = updateDependencies PK]�\V_0package-json/lib/index.jsnu�[���const { readFile, writeFile } = require('fs/promises') const { resolve } = require('path') const updateDeps = require('./update-dependencies.js') const updateScripts = require('./update-scripts.js') const updateWorkspaces = require('./update-workspaces.js') const normalize = require('./normalize.js') const parseJSON = require('json-parse-even-better-errors') // a list of handy specialized helper functions that take // care of special cases that are handled by the npm cli const knownSteps = new Set([ updateDeps, updateScripts, updateWorkspaces, ]) // list of all keys that are handled by "knownSteps" helpers const knownKeys = new Set([ ...updateDeps.knownKeys, 'scripts', 'workspaces', ]) class PackageJson { static normalizeSteps = Object.freeze([ '_id', '_attributes', 
'bundledDependencies', 'bundleDependencies', 'optionalDedupe', 'scripts', 'funding', 'bin', ]) // npm pkg fix static fixSteps = Object.freeze([ 'binRefs', 'bundleDependencies', 'bundleDependenciesFalse', 'fixNameField', 'fixVersionField', 'fixRepositoryField', 'fixDependencies', 'devDependencies', 'scriptpath', ]) static prepareSteps = Object.freeze([ '_id', '_attributes', 'bundledDependencies', 'bundleDependencies', 'bundleDependenciesDeleteFalse', 'gypfile', 'serverjs', 'scriptpath', 'authors', 'readme', 'mans', 'binDir', 'gitHead', 'fillTypes', 'normalizeData', 'binRefs', ]) // create a new empty package.json, so we can save at the given path even // though we didn't start from a parsed file static async create (path, opts = {}) { const p = new PackageJson() await p.create(path) if (opts.data) { return p.update(opts.data) } return p } // Loads a package.json at given path and JSON parses static async load (path, opts = {}) { const p = new PackageJson() // Avoid try/catch if we aren't going to create if (!opts.create) { return p.load(path) } try { return await p.load(path) } catch (err) { if (!err.message.startsWith('Could not read package.json')) { throw err } return await p.create(path) } } // npm pkg fix static async fix (path, opts) { const p = new PackageJson() await p.load(path, true) return p.fix(opts) } // read-package-json compatible behavior static async prepare (path, opts) { const p = new PackageJson() await p.load(path, true) return p.prepare(opts) } // read-package-json-fast compatible behavior static async normalize (path, opts) { const p = new PackageJson() await p.load(path) return p.normalize(opts) } #path #manifest #readFileContent = '' #canSave = true // Load content from given path async load (path, parseIndex) { this.#path = path let parseErr try { this.#readFileContent = await readFile(this.filename, 'utf8') } catch (err) { err.message = `Could not read package.json: ${err}` if (!parseIndex) { throw err } parseErr = err } if (parseErr) { 
const indexFile = resolve(this.path, 'index.js') let indexFileContent try { indexFileContent = await readFile(indexFile, 'utf8') } catch (err) { throw parseErr } try { this.fromComment(indexFileContent) } catch (err) { throw parseErr } // This wasn't a package.json so prevent saving this.#canSave = false return this } return this.fromJSON(this.#readFileContent) } // Load data from a JSON string/buffer fromJSON (data) { try { this.#manifest = parseJSON(data) } catch (err) { err.message = `Invalid package.json: ${err}` throw err } return this } fromContent (data) { this.#manifest = data this.#canSave = false return this } // Load data from a comment // /**package { "name": "foo", "version": "1.2.3", ... } **/ fromComment (data) { data = data.split(/^\/\*\*package(?:\s|$)/m) if (data.length < 2) { throw new Error('File has no package in comments') } data = data[1] data = data.split(/\*\*\/$/m) if (data.length < 2) { throw new Error('File has no package in comments') } data = data[0] data = data.replace(/^\s*\*/mg, '') this.#manifest = parseJSON(data) return this } get content () { return this.#manifest } get path () { return this.#path } get filename () { if (this.path) { return resolve(this.path, 'package.json') } return undefined } create (path) { this.#path = path this.#manifest = {} return this } // This should be the ONLY way to set content in the manifest update (content) { if (!this.content) { throw new Error('Can not update without content. Please `load` or `create`') } for (const step of knownSteps) { this.#manifest = step({ content, originalContent: this.content }) } // unknown properties will just be overwitten for (const [key, value] of Object.entries(content)) { if (!knownKeys.has(key)) { this.content[key] = value } } return this } async save () { if (!this.#canSave) { throw new Error('No package.json to save to') } const { [Symbol.for('indent')]: indent, [Symbol.for('newline')]: newline, } = this.content const format = indent === undefined ? 
' ' : indent const eol = newline === undefined ? '\n' : newline const fileContent = `${ JSON.stringify(this.content, null, format) }\n` .replace(/\n/g, eol) if (fileContent.trim() !== this.#readFileContent.trim()) { return await writeFile(this.filename, fileContent) } } async normalize (opts = {}) { if (!opts.steps) { opts.steps = this.constructor.normalizeSteps } await normalize(this, opts) return this } async prepare (opts = {}) { if (!opts.steps) { opts.steps = this.constructor.prepareSteps } await normalize(this, opts) return this } async fix (opts = {}) { // This one is not overridable opts.steps = this.constructor.fixSteps await normalize(this, opts) return this } } module.exports = PackageJson PK]�\����M�Mpackage-json/lib/normalize.jsnu�[���const valid = require('semver/functions/valid') const clean = require('semver/functions/clean') const fs = require('fs/promises') const path = require('path') const { log } = require('proc-log') /** * @type {import('hosted-git-info')} */ let _hostedGitInfo function lazyHostedGitInfo () { if (!_hostedGitInfo) { _hostedGitInfo = require('hosted-git-info') } return _hostedGitInfo } /** * @type {import('glob').glob} */ let _glob function lazyLoadGlob () { if (!_glob) { _glob = require('glob').glob } return _glob } // used to be npm-normalize-package-bin function normalizePackageBin (pkg, changes) { if (pkg.bin) { if (typeof pkg.bin === 'string' && pkg.name) { changes?.push('"bin" was converted to an object') pkg.bin = { [pkg.name]: pkg.bin } } else if (Array.isArray(pkg.bin)) { changes?.push('"bin" was converted to an object') pkg.bin = pkg.bin.reduce((acc, k) => { acc[path.basename(k)] = k return acc }, {}) } if (typeof pkg.bin === 'object') { for (const binKey in pkg.bin) { if (typeof pkg.bin[binKey] !== 'string') { delete pkg.bin[binKey] changes?.push(`removed invalid "bin[${binKey}]"`) continue } const base = path.basename(secureAndUnixifyPath(binKey)) if (!base) { delete pkg.bin[binKey] changes?.push(`removed invalid 
"bin[${binKey}]"`) continue } const binTarget = secureAndUnixifyPath(pkg.bin[binKey]) if (!binTarget) { delete pkg.bin[binKey] changes?.push(`removed invalid "bin[${binKey}]"`) continue } if (base !== binKey) { delete pkg.bin[binKey] changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`) } if (binTarget !== pkg.bin[binKey]) { changes?.push(`"bin[${base}]" script name was cleaned`) } pkg.bin[base] = binTarget } if (Object.keys(pkg.bin).length === 0) { changes?.push('empty "bin" was removed') delete pkg.bin } return pkg } } delete pkg.bin } function normalizePackageMan (pkg, changes) { if (pkg.man) { const mans = [] for (const man of (Array.isArray(pkg.man) ? pkg.man : [pkg.man])) { if (typeof man !== 'string') { changes?.push(`removed invalid "man [${man}]"`) } else { mans.push(secureAndUnixifyPath(man)) } } if (!mans.length) { changes?.push('empty "man" was removed') } else { pkg.man = mans return pkg } } delete pkg.man } function isCorrectlyEncodedName (spec) { return !spec.match(/[/@\s+%:]/) && spec === encodeURIComponent(spec) } function isValidScopedPackageName (spec) { if (spec.charAt(0) !== '@') { return false } const rest = spec.slice(1).split('/') if (rest.length !== 2) { return false } return rest[0] && rest[1] && rest[0] === encodeURIComponent(rest[0]) && rest[1] === encodeURIComponent(rest[1]) } function unixifyPath (ref) { return ref.replace(/\\|:/g, '/') } function securePath (ref) { const secured = path.join('.', path.join('/', unixifyPath(ref))) return secured.startsWith('.') ? '' : secured } function secureAndUnixifyPath (ref) { return unixifyPath(securePath(ref)) } // We don't want the `changes` array in here by default because this is a hot // path for parsing packuments during install. So the calling method passes it // in if it wants to track changes. 
const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) => { if (!pkg.content) { throw new Error('Can not normalize without content') } const data = pkg.content const scripts = data.scripts || {} const pkgId = `${data.name ?? ''}@${data.version ?? ''}` // name and version are load bearing so we have to clean them up first if (steps.includes('fixNameField') || steps.includes('normalizeData')) { if (!data.name && !strict) { changes?.push('Missing "name" field was set to an empty string') data.name = '' } else { if (typeof data.name !== 'string') { throw new Error('name field must be a string.') } if (!strict) { const name = data.name.trim() if (data.name !== name) { changes?.push(`Whitespace was trimmed from "name"`) data.name = name } } if (data.name.startsWith('.') || !(isValidScopedPackageName(data.name) || isCorrectlyEncodedName(data.name)) || (strict && (!allowLegacyCase) && data.name !== data.name.toLowerCase()) || data.name.toLowerCase() === 'node_modules' || data.name.toLowerCase() === 'favicon.ico') { throw new Error('Invalid name: ' + JSON.stringify(data.name)) } } } if (steps.includes('fixVersionField') || steps.includes('normalizeData')) { // allow "loose" semver 1.0 versions in non-strict mode // enforce strict semver 2.0 compliance in strict mode const loose = !strict if (!data.version) { data.version = '' } else { if (!valid(data.version, loose)) { throw new Error(`Invalid version: "${data.version}"`) } const version = clean(data.version, loose) if (version !== data.version) { changes?.push(`"version" was cleaned and set to "${version}"`) data.version = version } } } // remove attributes that start with "_" if (steps.includes('_attributes')) { for (const key in data) { if (key.startsWith('_')) { changes?.push(`"${key}" was removed`) delete pkg.content[key] } } } // build the "_id" attribute if (steps.includes('_id')) { if (data.name && data.version) { changes?.push(`"_id" was set to ${pkgId}`) data._id = pkgId } } // fix 
bundledDependencies typo // normalize bundleDependencies if (steps.includes('bundledDependencies')) { if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) { data.bundleDependencies = data.bundledDependencies } changes?.push(`Deleted incorrect "bundledDependencies"`) delete data.bundledDependencies } // expand "bundleDependencies: true or translate from object" if (steps.includes('bundleDependencies')) { const bd = data.bundleDependencies if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) { changes?.push(`"bundleDependencies" was changed from "false" to "[]"`) data.bundleDependencies = [] } else if (bd === true) { changes?.push(`"bundleDependencies" was auto-populated from "dependencies"`) data.bundleDependencies = Object.keys(data.dependencies || {}) } else if (bd && typeof bd === 'object') { if (!Array.isArray(bd)) { changes?.push(`"bundleDependencies" was changed from an object to an array`) data.bundleDependencies = Object.keys(bd) } } else if ('bundleDependencies' in data) { changes?.push(`"bundleDependencies" was removed`) delete data.bundleDependencies } } // it was once common practice to list deps both in optionalDependencies and // in dependencies, to support npm versions that did not know about // optionalDependencies. This is no longer a relevant need, so duplicating // the deps in two places is unnecessary and excessive. 
if (steps.includes('optionalDedupe')) { if (data.dependencies && data.optionalDependencies && typeof data.optionalDependencies === 'object') { for (const name in data.optionalDependencies) { changes?.push(`optionalDependencies."${name}" was removed`) delete data.dependencies[name] } if (!Object.keys(data.dependencies).length) { changes?.push(`Empty "optionalDependencies" was removed`) delete data.dependencies } } } // add "install" attribute if any "*.gyp" files exist if (steps.includes('gypfile')) { if (!scripts.install && !scripts.preinstall && data.gypfile !== false) { const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path }) if (files.length) { scripts.install = 'node-gyp rebuild' data.scripts = scripts data.gypfile = true changes?.push(`"scripts.install" was set to "node-gyp rebuild"`) changes?.push(`"gypfile" was set to "true"`) } } } // add "start" attribute if "server.js" exists if (steps.includes('serverjs') && !scripts.start) { try { await fs.access(path.join(pkg.path, 'server.js')) scripts.start = 'node server.js' data.scripts = scripts changes?.push('"scripts.start" was set to "node server.js"') } catch { // do nothing } } // strip "node_modules/.bin" from scripts entries // remove invalid scripts entries (non-strings) if ((steps.includes('scripts') || steps.includes('scriptpath')) && data.scripts !== undefined) { const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/ if (typeof data.scripts === 'object') { for (const name in data.scripts) { if (typeof data.scripts[name] !== 'string') { delete data.scripts[name] changes?.push(`Invalid scripts."${name}" was removed`) } else if (steps.includes('scriptpath') && spre.test(data.scripts[name])) { data.scripts[name] = data.scripts[name].replace(spre, '') changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`) } } } else { changes?.push(`Removed invalid "scripts"`) delete data.scripts } } if (steps.includes('funding')) { if (data.funding && typeof data.funding === 'string') { 
data.funding = { url: data.funding } changes?.push(`"funding" was changed to an object with a url attribute`) } } // populate "authors" attribute if (steps.includes('authors') && !data.contributors) { try { const authorData = await fs.readFile(path.join(pkg.path, 'AUTHORS'), 'utf8') const authors = authorData.split(/\r?\n/g) .map(line => line.replace(/^\s*#.*$/, '').trim()) .filter(line => line) data.contributors = authors changes?.push('"contributors" was auto-populated with the contents of the "AUTHORS" file') } catch { // do nothing } } // populate "readme" attribute if (steps.includes('readme') && !data.readme) { const mdre = /\.m?a?r?k?d?o?w?n?$/i const files = await lazyLoadGlob()('{README,README.*}', { cwd: pkg.path, nocase: true, mark: true, }) let readmeFile for (const file of files) { // don't accept directories. if (!file.endsWith(path.sep)) { if (file.match(mdre)) { readmeFile = file break } if (file.endsWith('README')) { readmeFile = file } } } if (readmeFile) { const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8') data.readme = readmeData data.readmeFilename = readmeFile changes?.push(`"readme" was set to the contents of ${readmeFile}`) changes?.push(`"readmeFilename" was set to ${readmeFile}`) } if (!data.readme) { // this.warn('missingReadme') data.readme = 'ERROR: No README data found!' 
} } // expand directories.man if (steps.includes('mans')) { if (data.directories?.man && !data.man) { const manDir = secureAndUnixifyPath(data.directories.man) const cwd = path.resolve(pkg.path, manDir) const files = await lazyLoadGlob()('**/*.[0-9]', { cwd }) data.man = files.map(man => path.relative(pkg.path, path.join(cwd, man)).split(path.sep).join('/') ) } normalizePackageMan(data, changes) } if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) { normalizePackageBin(data, changes) } // expand "directories.bin" if (steps.includes('binDir') && data.directories?.bin && !data.bin) { const binsDir = path.resolve(pkg.path, securePath(data.directories.bin)) const bins = await lazyLoadGlob()('**', { cwd: binsDir }) data.bin = bins.reduce((acc, binFile) => { if (binFile && !binFile.startsWith('.')) { const binName = path.basename(binFile) acc[binName] = path.join(data.directories.bin, binFile) } return acc }, {}) // *sigh* normalizePackageBin(data, changes) } // populate "gitHead" attribute if (steps.includes('gitHead') && !data.gitHead) { const git = require('@npmcli/git') const gitRoot = await git.find({ cwd: pkg.path, root }) let head if (gitRoot) { try { head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8') } catch (err) { // do nothing } } let headData if (head) { if (head.startsWith('ref: ')) { const headRef = head.replace(/^ref: /, '').trim() const headFile = path.resolve(gitRoot, '.git', headRef) try { headData = await fs.readFile(headFile, 'utf8') headData = headData.replace(/^ref: /, '').trim() } catch (err) { // do nothing } if (!headData) { const packFile = path.resolve(gitRoot, '.git/packed-refs') try { let refs = await fs.readFile(packFile, 'utf8') if (refs) { refs = refs.split('\n') for (let i = 0; i < refs.length; i++) { const match = refs[i].match(/^([0-9a-f]{40}) (.+)$/) if (match && match[2].trim() === headRef) { headData = match[1] break } } } } catch { // do nothing } } } else { headData = head.trim() } 
} if (headData) { data.gitHead = headData } } // populate "types" attribute if (steps.includes('fillTypes')) { const index = data.main || 'index.js' if (typeof index !== 'string') { throw new TypeError('The "main" attribute must be of type string.') } // TODO exports is much more complicated than this in verbose format // We need to support for instance // "exports": { // ".": [ // { // "default": "./lib/npm.js" // }, // "./lib/npm.js" // ], // "./package.json": "./package.json" // }, // as well as conditional exports // if (data.exports && typeof data.exports === 'string') { // index = data.exports // } // if (data.exports && data.exports['.']) { // index = data.exports['.'] // if (typeof index !== 'string') { // } // } const extless = path.join(path.dirname(index), path.basename(index, path.extname(index))) const dts = `./${extless}.d.ts` const hasDTSFields = 'types' in data || 'typings' in data if (!hasDTSFields) { try { await fs.access(path.join(pkg.path, dts)) data.types = dts.split(path.sep).join('/') } catch { // do nothing } } } // "normalizeData" from "read-package-json", which was just a call through to // "normalize-package-data". 
We only call the "fixer" functions because // outside of that it was also clobbering _id (which we already conditionally // do) and also adding the gypfile script (which we also already // conditionally do) // Some steps are isolated so we can do a limited subset of these in `fix` if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) { if (data.repositories) { /* eslint-disable-next-line max-len */ changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`) data.repository = data.repositories[0] } if (data.repository) { if (typeof data.repository === 'string') { changes?.push('"repository" was changed from a string to an object') data.repository = { type: 'git', url: data.repository, } } if (data.repository.url) { const hosted = lazyHostedGitInfo().fromUrl(data.repository.url) let r if (hosted) { if (hosted.getDefaultRepresentation() === 'shortcut') { r = hosted.https() } else { r = hosted.toString() } if (r !== data.repository.url) { changes?.push(`"repository.url" was normalized to "${r}"`) data.repository.url = r } } } } } if (steps.includes('fixDependencies') || steps.includes('normalizeData')) { // peerDependencies? 
// devDependencies is meaningless here, it's ignored on an installed package for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) { if (data[type]) { let secondWarning = true if (typeof data[type] === 'string') { changes?.push(`"${type}" was converted from a string into an object`) data[type] = data[type].trim().split(/[\n\r\s\t ,]+/) secondWarning = false } if (Array.isArray(data[type])) { if (secondWarning) { changes?.push(`"${type}" was converted from an array into an object`) } const o = {} for (const d of data[type]) { if (typeof d === 'string') { const dep = d.trim().split(/(:?[@\s><=])/) const dn = dep.shift() const dv = dep.join('').replace(/^@/, '').trim() o[dn] = dv } } data[type] = o } } } // normalize-package-data used to put optional dependencies BACK into // dependencies here, we no longer do this for (const deps of ['dependencies', 'devDependencies']) { if (deps in data) { if (!data[deps] || typeof data[deps] !== 'object') { changes?.push(`Removed invalid "${deps}"`) delete data[deps] } else { for (const d in data[deps]) { const r = data[deps][d] if (typeof r !== 'string') { changes?.push(`Removed invalid "${deps}.${d}"`) delete data[deps][d] } const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString() if (hosted && hosted !== data[deps][d]) { changes?.push(`Normalized git reference to "${deps}.${d}"`) data[deps][d] = hosted.toString() } } } } } } if (steps.includes('normalizeData')) { const legacyFixer = require('normalize-package-data/lib/fixer.js') const legacyMakeWarning = require('normalize-package-data/lib/make_warning.js') legacyFixer.warn = function () { changes?.push(legacyMakeWarning.apply(null, arguments)) } const legacySteps = [ 'fixDescriptionField', 'fixModulesField', 'fixFilesField', 'fixManField', 'fixBugsField', 'fixKeywordsField', 'fixBundleDependenciesField', 'fixHomepageField', 'fixReadmeField', 'fixLicenseField', 'fixPeople', 'fixTypos', ] for (const legacyStep of legacySteps) { 
legacyFixer[legacyStep](data) } } // Warn if the bin references don't point to anything. This might be better // in normalize-package-data if it had access to the file path. if (steps.includes('binRefs') && data.bin instanceof Object) { for (const key in data.bin) { try { await fs.access(path.resolve(pkg.path, data.bin[key])) } catch { log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`) // XXX: should a future breaking change delete bin entries that cannot be accessed? } } } } module.exports = normalize PK]�\)xU1��package-json/LICENSEnu�[���ISC License Copyright GitHub Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. PK]�\ewHH metavuln-calculator/package.jsonnu�[���{ "_id": "@npmcli/metavuln-calculator@7.1.1", "_inBundle": true, "_location": "/npm/@npmcli/metavuln-calculator", "_phantomChildren": {}, "_requiredBy": [ "/npm/@npmcli/arborist" ], "author": { "name": "GitHub Inc." 
}, "bugs": { "url": "https://github.com/npm/metavuln-calculator/issues" }, "dependencies": { "cacache": "^18.0.0", "json-parse-even-better-errors": "^3.0.0", "pacote": "^18.0.0", "proc-log": "^4.1.0", "semver": "^7.3.5" }, "description": "Calculate meta-vulnerabilities from package security advisories", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.22.0", "require-inject": "^1.4.4", "tap": "^16.0.1" }, "engines": { "node": "^16.14.0 || >=18.0.0" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/metavuln-calculator#readme", "license": "ISC", "main": "lib/index.js", "name": "@npmcli/metavuln-calculator", "repository": { "type": "git", "url": "git+https://github.com/npm/metavuln-calculator.git" }, "scripts": { "eslint": "eslint", "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "postsnap": "npm run lint", "posttest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap" }, "tap": { "check-coverage": true, "coverage-map": "map.js", "nyc-arg": [ "--exclude", "tap-snapshots/**" ] }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.22.0", "publish": "true", "ciVersions": [ "16.14.0", "16.x", "18.0.0", "18.x" ] }, "version": "7.1.1" } PK]�\��N��metavuln-calculator/lib/hash.jsnu�[���const { createHash } = require('crypto') module.exports = ({ name, source }) => createHash('sha512') .update(JSON.stringify([name, source])) .digest('base64') PK]�\�~4t4t4#metavuln-calculator/lib/advisory.jsnu�[���const hash = require('./hash.js') const semver = require('semver') const semverOpt = { includePrerelease: true, loose: true } const getDepSpec = require('./get-dep-spec.js') // any fields that we don't want in the cache need to be hidden const _source = Symbol('source') const _packument = Symbol('packument') const _versionVulnMemo = Symbol('versionVulnMemo') const _updated = Symbol('updated') const _options = Symbol('options') const _specVulnMemo = Symbol('specVulnMemo') const _testVersion = Symbol('testVersion') const _testVersions = Symbol('testVersions') const _calculateRange = Symbol('calculateRange') const _markVulnerable = Symbol('markVulnerable') const _testSpec = Symbol('testSpec') class Advisory { constructor (name, source, options = {}) { this.source = source.id this[_source] = source this[_options] = options this.name = name if (!source.name) { source.name = name } this.dependency = source.name if (this.type === 'advisory') { this.title = source.title this.url = source.url } else { this.title = `Depends on vulnerable versions of ${source.name}` this.url = null } this.severity = source.severity || 'high' this.versions = [] this.vulnerableVersions = [] this.cwe = source.cwe this.cvss = source.cvss // advisories have the range, metavulns do not // if an advisory doesn't specify range, assume all are vulnerable this.range = this.type === 'advisory' ? 
source.vulnerable_versions || '*' : null this.id = hash(this) this[_packument] = null // memoized list of which versions are vulnerable this[_versionVulnMemo] = new Map() // memoized list of which dependency specs are vulnerable this[_specVulnMemo] = new Map() this[_updated] = false } // true if we updated from what we had in cache get updated () { return this[_updated] } get type () { return this.dependency === this.name ? 'advisory' : 'metavuln' } get packument () { return this[_packument] } // load up the data from a cache entry and a fetched packument load (cached, packument) { // basic data integrity gutcheck if (!cached || typeof cached !== 'object') { throw new TypeError('invalid cached data, expected object') } if (!packument || typeof packument !== 'object') { throw new TypeError('invalid packument data, expected object') } if (cached.id && cached.id !== this.id) { throw Object.assign(new Error('loading from incorrect cache entry'), { expected: this.id, actual: cached.id, }) } if (packument.name !== this.name) { throw Object.assign(new Error('loading from incorrect packument'), { expected: this.name, actual: packument.name, }) } if (this[_packument]) { throw new Error('advisory object already loaded') } // if we have a range from the initialization, and the cached // data has a *different* range, then we know we have to recalc. 
// just don't use the cached data, so we will definitely not match later if (!this.range || cached.range && cached.range === this.range) { Object.assign(this, cached) } this[_packument] = packument const pakuVersions = Object.keys(packument.versions || {}) const allVersions = new Set([...pakuVersions, ...this.versions]) const versionsAdded = [] const versionsRemoved = [] for (const v of allVersions) { if (!this.versions.includes(v)) { versionsAdded.push(v) this.versions.push(v) } else if (!pakuVersions.includes(v)) { versionsRemoved.push(v) } } // strip out any removed versions from our lists, and sort by semver this.versions = semver.sort(this.versions.filter(v => !versionsRemoved.includes(v)), semverOpt) // if no changes, then just return what we got from cache // versions added or removed always means we changed // otherwise, advisories change if the range changes, and // metavulns change if the source was updated const unchanged = this.type === 'advisory' ? this.range && this.range === cached.range : !this[_source].updated // if the underlying source changed, by an advisory updating the // range, or a source advisory being updated, then we have to re-check // otherwise, only recheck the new ones. this.vulnerableVersions = !unchanged ? [] : semver.sort(this.vulnerableVersions.filter(v => !versionsRemoved.includes(v)), semverOpt) if (unchanged && !versionsAdded.length && !versionsRemoved.length) { // nothing added or removed, nothing to do here. use the cached copy. return this } this[_updated] = true // test any versions newly added if (!unchanged || versionsAdded.length) { this[_testVersions](unchanged ? 
versionsAdded : this.versions) } this.vulnerableVersions = semver.sort(this.vulnerableVersions, semverOpt) // metavulns have to calculate their range, since cache is invalidated // advisories just get their range from the advisory above if (this.type === 'metavuln') { this[_calculateRange]() } return this } [_calculateRange] () { // calling semver.simplifyRange with a massive list of versions, and those // versions all concatenated with `||` is a geometric CPU explosion! // we can try to be a *little* smarter up front by doing x-y for all // contiguous version sets in the list const ranges = [] this.versions = semver.sort(this.versions, semverOpt) this.vulnerableVersions = semver.sort(this.vulnerableVersions, semverOpt) for (let v = 0, vulnVer = 0; v < this.versions.length; v++) { // figure out the vulnerable subrange const vr = [this.versions[v]] while (v < this.versions.length) { if (this.versions[v] !== this.vulnerableVersions[vulnVer]) { // we don't test prerelease versions, so just skip past it if (/-/.test(this.versions[v])) { v++ continue } break } if (vr.length > 1) { vr[1] = this.versions[v] } else { vr.push(this.versions[v]) } v++ vulnVer++ } // it'll either be just the first version, which means no overlap, // or the start and end versions, which might be the same version if (vr.length > 1) { const tail = this.versions[this.versions.length - 1] ranges.push(vr[1] === tail ? `>=${vr[0]}` : vr[0] === vr[1] ? vr[0] : vr.join(' - ')) } } const metavuln = ranges.join(' || ').trim() this.range = !metavuln ? '<0.0.0-0' : semver.simplifyRange(this.versions, metavuln, semverOpt) } // returns true if marked as vulnerable, false if ok // spec is a dependency specifier, for metavuln cases // where the version might not be in the packument. if // we have the packument and spec is not provided, then // we use the dependency version from the manifest. 
testVersion (version, spec = null) { const sv = String(version) if (this[_versionVulnMemo].has(sv)) { return this[_versionVulnMemo].get(sv) } const result = this[_testVersion](version, spec) if (result) { this[_markVulnerable](version) } this[_versionVulnMemo].set(sv, !!result) return result } [_markVulnerable] (version) { const sv = String(version) if (!this.vulnerableVersions.includes(sv)) { this.vulnerableVersions.push(sv) } } [_testVersion] (version, spec) { const sv = String(version) if (this.vulnerableVersions.includes(sv)) { return true } if (this.type === 'advisory') { // advisory, just test range return semver.satisfies(version, this.range, semverOpt) } // check the dependency of this version on the vulnerable dep // if we got a version that's not in the packument, fall back on // the spec provided, if possible. const mani = this[_packument]?.versions?.[version] || { dependencies: { [this.dependency]: spec, }, } if (!spec) { spec = getDepSpec(mani, this.dependency) } // no dep, no vuln if (spec === null) { return false } if (!semver.validRange(spec, semverOpt)) { // not a semver range, nothing we can hope to do about it return true } const bd = mani.bundleDependencies const bundled = bd && bd.includes(this[_source].name) // XXX if bundled, then semver.intersects() means vulnerable // else, pick a manifest and see if it can't be avoided // try to pick a version of the dep that isn't vulnerable const avoid = this[_source].range if (bundled) { return semver.intersects(spec, avoid, semverOpt) } return this[_source].testSpec(spec) } testSpec (spec) { // testing all the versions is a bit costly, and the spec tends to stay // consistent across multiple versions, so memoize this as well, in case // we're testing lots of versions. 
const memo = this[_specVulnMemo] if (memo.has(spec)) { return memo.get(spec) } const res = this[_testSpec](spec) memo.set(spec, res) return res } [_testSpec] (spec) { for (const v of this.versions) { const satisfies = semver.satisfies(v, spec) if (!satisfies) { continue } if (!this.testVersion(v)) { return false } } // either vulnerable, or not installable because nothing satisfied // either way, best avoided. return true } [_testVersions] (versions) { if (!versions.length) { return } // set of lists of versions const versionSets = new Set() versions = semver.sort(versions.map(v => semver.parse(v, semverOpt))) // start out with the versions grouped by major and minor let last = versions[0].major + '.' + versions[0].minor let list = [] versionSets.add(list) for (const v of versions) { const k = v.major + '.' + v.minor if (k !== last) { last = k list = [] versionSets.add(list) } list.push(v) } for (const set of versionSets) { // it's common to have version lists like: // 1.0.0 // 1.0.1-alpha.0 // 1.0.1-alpha.1 // ... // 1.0.1-alpha.999 // 1.0.1 // 1.0.2-alpha.0 // ... // 1.0.2-alpha.99 // 1.0.2 // with a huge number of prerelease versions that are not installable // anyway. // If mid has a prerelease tag, and set[0] does not, then walk it // back until we hit a non-prerelease version // If mid has a prerelease tag, and set[set.length-1] does not, // then walk it forward until we hit a version without a prerelease tag // Similarly, if the head/tail is a prerelease, but there is a non-pr // version in the set, then start there instead. let h = 0 const origHeadVuln = this.testVersion(set[h]) while (h < set.length && /-/.test(String(set[h]))) { h++ } // don't filter out the whole list! 
they might all be pr's if (h === set.length) { h = 0 } else if (origHeadVuln) { // if the original was vulnerable, assume so are all of these for (let hh = 0; hh < h; hh++) { this[_markVulnerable](set[hh]) } } let t = set.length - 1 const origTailVuln = this.testVersion(set[t]) while (t > h && /-/.test(String(set[t]))) { t-- } // don't filter out the whole list! might all be pr's if (t === h) { t = set.length - 1 } else if (origTailVuln) { // if original tail was vulnerable, assume these are as well for (let tt = set.length - 1; tt > t; tt--) { this[_markVulnerable](set[tt]) } } const headVuln = h === 0 ? origHeadVuln : this.testVersion(set[h]) const tailVuln = t === set.length - 1 ? origTailVuln : this.testVersion(set[t]) // if head and tail both vulnerable, whole list is thrown out if (headVuln && tailVuln) { for (let v = h; v < t; v++) { this[_markVulnerable](set[v]) } continue } // if length is 2 or 1, then we marked them all already if (t < h + 2) { continue } const mid = Math.floor(set.length / 2) const pre = set.slice(0, mid) const post = set.slice(mid) // if the parent list wasn't prereleases, then drop pr tags // from end of the pre list, and beginning of the post list, // marking as vulnerable if the midpoint item we picked is. if (!/-/.test(String(pre[0]))) { const midVuln = this.testVersion(pre[pre.length - 1]) while (/-/.test(String(pre[pre.length - 1]))) { const v = pre.pop() if (midVuln) { this[_markVulnerable](v) } } } if (!/-/.test(String(post[post.length - 1]))) { const midVuln = this.testVersion(post[0]) while (/-/.test(String(post[0]))) { const v = post.shift() if (midVuln) { this[_markVulnerable](v) } } } versionSets.add(pre) versionSets.add(post) } } } module.exports = Advisory PK]�\Kt�s�� metavuln-calculator/lib/index.jsnu�[���// this is the public class that is used by consumers. // the Advisory class handles all the calculation, and this // class handles all the IO with the registry and cache. 
const pacote = require('pacote') const cacache = require('cacache') const { time } = require('proc-log') const Advisory = require('./advisory.js') const { homedir } = require('os') const jsonParse = require('json-parse-even-better-errors') const _packument = Symbol('packument') const _cachePut = Symbol('cachePut') const _cacheGet = Symbol('cacheGet') const _cacheData = Symbol('cacheData') const _packuments = Symbol('packuments') const _cache = Symbol('cache') const _options = Symbol('options') const _advisories = Symbol('advisories') const _calculate = Symbol('calculate') class Calculator { constructor (options = {}) { this[_options] = { ...options } this[_cache] = this[_options].cache || (homedir() + '/.npm/_cacache') this[_options].cache = this[_cache] this[_packuments] = new Map() this[_cacheData] = new Map() this[_advisories] = new Map() } get cache () { return this[_cache] } get options () { return { ...this[_options] } } async calculate (name, source) { const k = `security-advisory:${name}:${source.id}` if (this[_advisories].has(k)) { return this[_advisories].get(k) } const p = this[_calculate](name, source) this[_advisories].set(k, p) return p } async [_calculate] (name, source) { const k = `security-advisory:${name}:${source.id}` const timeEnd = time.start(`metavuln:calculate:${k}`) const advisory = new Advisory(name, source, this[_options]) // load packument and cached advisory const [cached, packument] = await Promise.all([ this[_cacheGet](advisory), this[_packument](name), ]) const timeEndLoad = time.start(`metavuln:load:${k}`) advisory.load(cached, packument) timeEndLoad() if (advisory.updated) { await this[_cachePut](advisory) } this[_advisories].set(k, advisory) timeEnd() return advisory } async [_cachePut] (advisory) { const { name, id } = advisory const key = `security-advisory:${name}:${id}` const timeEnd = time.start(`metavuln:cache:put:${key}`) const data = JSON.stringify(advisory) const options = { ...this[_options] } this[_cacheData].set(key, 
jsonParse(data)) await cacache.put(this[_cache], key, data, options).catch(() => {}) timeEnd() } async [_cacheGet] (advisory) { const { name, id } = advisory const key = `security-advisory:${name}:${id}` /* istanbul ignore if - should be impossible, since we memoize the * advisory object itself using the same key, just being cautious */ if (this[_cacheData].has(key)) { return this[_cacheData].get(key) } const timeEnd = time.start(`metavuln:cache:get:${key}`) const p = cacache.get(this[_cache], key, { ...this[_options] }) .catch(() => ({ data: '{}' })) .then(({ data }) => { data = jsonParse(data) timeEnd() this[_cacheData].set(key, data) return data }) this[_cacheData].set(key, p) return p } async [_packument] (name) { if (this[_packuments].has(name)) { return this[_packuments].get(name) } const timeEnd = time.start(`metavuln:packument:${name}`) const p = pacote.packument(name, { ...this[_options] }) .catch(() => { // presumably not something from the registry. // an empty packument will have an effective range of * return { name, versions: {}, } }) .then(paku => { timeEnd() this[_packuments].set(name, paku) return paku }) this[_packuments].set(name, p) return p } } module.exports = Calculator PK]�\��� 'metavuln-calculator/lib/get-dep-spec.jsnu�[���module.exports = (mani, name) => { // skip dev because that only matters at the root, // where we aren't fetching a manifest from the registry // with multiple versions anyway. const { dependencies: deps = {}, optionalDependencies: optDeps = {}, peerDependencies: peerDeps = {}, } = mani return deps && typeof deps[name] === 'string' ? deps[name] : optDeps && typeof optDeps[name] === 'string' ? optDeps[name] : peerDeps && typeof peerDeps[name] === 'string' ? peerDeps[name] : null } PK]�\.9����metavuln-calculator/LICENSEnu�[���The ISC License Copyright (c) npm, Inc. 
Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. PK]�\�2q�KKarborist/package.jsonnu�[���{ "_id": "@npmcli/arborist@7.5.3", "_inBundle": true, "_location": "/npm/@npmcli/arborist", "_phantomChildren": {}, "_requiredBy": [ "/npm", "/npm/libnpmdiff", "/npm/libnpmexec", "/npm/libnpmfund", "/npm/libnpmpack" ], "author": { "name": "GitHub Inc." }, "bin": { "arborist": "bin/index.js" }, "bugs": { "url": "https://github.com/npm/cli/issues" }, "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", "@npmcli/fs": "^3.1.1", "@npmcli/installed-package-contents": "^2.1.0", "@npmcli/map-workspaces": "^3.0.2", "@npmcli/metavuln-calculator": "^7.1.1", "@npmcli/name-from-folder": "^2.0.0", "@npmcli/node-gyp": "^3.0.0", "@npmcli/package-json": "^5.1.0", "@npmcli/query": "^3.1.0", "@npmcli/redact": "^2.0.0", "@npmcli/run-script": "^8.1.0", "bin-links": "^4.0.4", "cacache": "^18.0.3", "common-ancestor-path": "^1.0.1", "hosted-git-info": "^7.0.2", "json-parse-even-better-errors": "^3.0.2", "json-stringify-nice": "^1.1.4", "lru-cache": "^10.2.2", "minimatch": "^9.0.4", "nopt": "^7.2.1", "npm-install-checks": "^6.2.0", "npm-package-arg": "^11.0.2", "npm-pick-manifest": "^9.0.1", "npm-registry-fetch": "^17.0.1", "pacote": "^18.0.6", "parse-conflict-json": "^3.0.0", "proc-log": "^4.2.0", "proggy": "^2.0.0", "promise-all-reject-late": "^1.0.0", 
"promise-call-limit": "^3.0.1", "read-package-json-fast": "^3.0.2", "semver": "^7.3.7", "ssri": "^10.0.6", "treeverse": "^3.0.0", "walk-up-path": "^3.0.1" }, "description": "Manage node_modules trees", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.22.0", "benchmark": "^2.1.4", "minify-registry-metadata": "^3.0.0", "nock": "^13.3.3", "tap": "^16.3.8", "tar-stream": "^3.0.0", "tcompare": "^5.0.6" }, "engines": { "node": "^16.14.0 || >=18.0.0" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/cli#readme", "license": "ISC", "main": "lib/index.js", "name": "@npmcli/arborist", "repository": { "type": "git", "url": "git+https://github.com/npm/cli.git", "directory": "workspaces/arborist" }, "scripts": { "benchclean": "rm -rf scripts/benchmark/*/", "benchmark": "node scripts/benchmark.js", "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "posttest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap", "test-proxy": "ARBORIST_TEST_PROXY=1 tap --snapshot" }, "tap": { "after": "test/fixtures/cleanup.js", "test-env": [ "LC_ALL=sk" ], "timeout": "360", "nyc-arg": [ "--exclude", "tap-snapshots/**" ] }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.22.0", "content": "../../scripts/template-oss/index.js" }, "version": "7.5.3" } PK]�\ .r�ffarborist/lib/printable.jsnu�[���// helper function to output a clearer visualization // of the current node and its descendents const localeCompare = require('@isaacs/string-locale-compare')('en') const util = require('node:util') const relpath = require('./relpath.js') class ArboristNode { constructor (tree, path) { this.name = tree.name if (tree.packageName && tree.packageName !== this.name) { this.packageName = tree.packageName } if (tree.version) { this.version = tree.version } this.location = tree.location this.path = tree.path if (tree.realpath !== this.path) { this.realpath = tree.realpath } if (tree.resolved !== null) { this.resolved = tree.resolved } if (tree.extraneous) { this.extraneous = true } if (tree.dev) { this.dev = true } if (tree.optional) { this.optional = true } if (tree.devOptional && !tree.dev && !tree.optional) { this.devOptional = true } if (tree.peer) { this.peer = true } if (tree.inBundle) { this.bundled = true } if (tree.inDepBundle) { this.bundler = tree.getBundler().location } if (tree.isProjectRoot) { this.isProjectRoot = true } if (tree.isWorkspace) { this.isWorkspace = true } const bd = tree.package && tree.package.bundleDependencies if (bd && bd.length) { this.bundleDependencies = bd } if (tree.inShrinkwrap) { this.inShrinkwrap = true } else if (tree.hasShrinkwrap) { this.hasShrinkwrap = true } if (tree.error) { this.error = treeError(tree.error) } if (tree.errors && tree.errors.length) { this.errors = tree.errors.map(treeError) } if (tree.overrides) { this.overrides = new Map([...tree.overrides.ruleset.values()] .map((override) => [override.key, override.value])) } // edgesOut sorted by name if (tree.edgesOut.size) { this.edgesOut = new Map([...tree.edgesOut.entries()] .sort(([a], [b]) => localeCompare(a, b)) .map(([name, edge]) => [name, new EdgeOut(edge)])) } // edgesIn sorted by location if 
(tree.edgesIn.size) { this.edgesIn = new Set([...tree.edgesIn] .sort((a, b) => localeCompare(a.from.location, b.from.location)) .map(edge => new EdgeIn(edge))) } if (tree.workspaces && tree.workspaces.size) { this.workspaces = new Map([...tree.workspaces.entries()] .map(([name, path]) => [name, relpath(tree.root.realpath, path)])) } // fsChildren sorted by path if (tree.fsChildren.size) { this.fsChildren = new Set([...tree.fsChildren] .sort(({ path: a }, { path: b }) => localeCompare(a, b)) .map(tree => printableTree(tree, path))) } // children sorted by name if (tree.children.size) { this.children = new Map([...tree.children.entries()] .sort(([a], [b]) => localeCompare(a, b)) .map(([name, tree]) => [name, printableTree(tree, path)])) } } } class ArboristVirtualNode extends ArboristNode { constructor (tree, path) { super(tree, path) this.sourceReference = printableTree(tree.sourceReference, path) } } class ArboristLink extends ArboristNode { constructor (tree, path) { super(tree, path) this.target = printableTree(tree.target, path) } } const treeError = ({ code, path }) => ({ code, ...(path ? { path } : {}), }) // print out edges without dumping the full node all over again // this base class will toJSON as a plain old object, but the // util.inspect() output will be a bit cleaner class Edge { constructor (edge) { this.type = edge.type this.name = edge.name this.spec = edge.rawSpec || '*' if (edge.rawSpec !== edge.spec) { this.override = edge.spec } if (edge.error) { this.error = edge.error } if (edge.peerConflicted) { this.peerConflicted = edge.peerConflicted } } } // don't care about 'from' for edges out class EdgeOut extends Edge { constructor (edge) { super(edge) this.to = edge.to && edge.to.location } [util.inspect.custom] () { return `{ ${this.type} ${this.name}@${this.spec}${ this.override ? ` overridden:${this.override}` : '' }${ this.to ? ' -> ' + this.to : '' }${ this.error ? ' ' + this.error : '' }${ this.peerConflicted ? 
' peerConflicted' : '' } }` } } // don't care about 'to' for edges in class EdgeIn extends Edge { constructor (edge) { super(edge) this.from = edge.from && edge.from.location } [util.inspect.custom] () { return `{ ${this.from || '""'} ${this.type} ${this.name}@${this.spec}${ this.error ? ' ' + this.error : '' }${ this.peerConflicted ? ' peerConflicted' : '' } }` } } const printableTree = (tree, path = []) => { if (!tree) { return tree } const Cls = tree.isLink ? ArboristLink : tree.sourceReference ? ArboristVirtualNode : ArboristNode if (path.includes(tree)) { const obj = Object.create(Cls.prototype) return Object.assign(obj, { location: tree.location }) } path.push(tree) return new Cls(tree, path) } module.exports = printableTree PK]�\�����!arborist/lib/override-resolves.jsnu�[���function overrideResolves (resolved, opts) { const { omitLockfileRegistryResolved = false } = opts if (omitLockfileRegistryResolved) { return undefined } return resolved } module.exports = { overrideResolves } PK]�\>��}jjarborist/lib/spec-from-lock.jsnu�[���const npa = require('npm-package-arg') // extracted from npm v6 lib/install/realize-shrinkwrap-specifier.js const specFromLock = (name, lock, where) => { try { if (lock.version) { const spec = npa.resolve(name, lock.version, where) if (lock.integrity || spec.type === 'git') { return spec } } if (lock.from) { // legacy metadata includes "from", but not integrity const spec = npa.resolve(name, lock.from, where) if (spec.registry && lock.version) { return npa.resolve(name, lock.version, where) } else if (!lock.resolved) { return spec } } if (lock.resolved) { return npa.resolve(name, lock.resolved, where) } } catch { // ignore errors } try { return npa.resolve(name, lock.version, where) } catch { return {} } } module.exports = specFromLock PK]�\��arborist/lib/dep-valid.jsnu�[���// Do not rely on package._fields, so that we don't throw // false failures if a tree is generated by other clients. 
// Only relies on child.resolved, which MAY come from // client-specific package.json meta _fields, but most of // the time will be pulled out of a lockfile const semver = require('semver') const npa = require('npm-package-arg') const { relative } = require('node:path') const fromPath = require('./from-path.js') const depValid = (child, requested, requestor) => { // NB: we don't do much to verify 'tag' type requests. // Just verify that we got a remote resolution. Presumably, it // came from a registry and was tagged at some point. if (typeof requested === 'string') { try { // tarball/dir must have resolved to the same tgz on disk, but for // file: deps that depend on other files/dirs, we must resolve the // location based on the *requestor* file/dir, not where it ends up. // '' is equivalent to '*' requested = npa.resolve(child.name, requested || '*', fromPath(requestor, requestor.edgesOut.get(child.name))) } catch (er) { // Not invalid because the child doesn't match, but because // the spec itself is not supported. Nothing would match, // so the edge is definitely not valid and never can be. er.dependency = child.name er.requested = requested requestor.errors.push(er) return false } } // if the lockfile is super old, or hand-modified, // then it's possible to hit this state. 
if (!requested) { const er = new Error('Invalid dependency specifier') er.dependency = child.name er.requested = requested requestor.errors.push(er) return false } switch (requested.type) { case 'range': if (requested.fetchSpec === '*') { return true } // fallthrough case 'version': // if it's a version or a range other than '*', semver it return semver.satisfies(child.version, requested.fetchSpec, true) case 'directory': return linkValid(child, requested, requestor) case 'file': return tarballValid(child, requested, requestor) case 'alias': // check that the alias target is valid return depValid(child, requested.subSpec, requestor) case 'tag': // if it's a tag, we just verify that it has a tarball resolution // presumably, it came from the registry and was tagged at some point return child.resolved && npa(child.resolved).type === 'remote' case 'remote': // verify that we got it from the desired location return child.resolved === requested.fetchSpec case 'git': { // if it's a git type, verify that they're the same repo // // if it specifies a definite commit, then it must have the // same commit to be considered the same repo // // if it has a #semver:<range> specifier, verify that the // version in the package is in the semver range const resRepo = npa(child.resolved || '') const resHost = resRepo.hosted const reqHost = requested.hosted const reqCommit = /^[a-fA-F0-9]{40}$/.test(requested.gitCommittish || '') const nc = { noCommittish: !reqCommit } if (!resHost) { if (resRepo.fetchSpec !== requested.fetchSpec) { return false } } else { if (reqHost?.ssh(nc) !== resHost.ssh(nc)) { return false } } if (!requested.gitRange) { return true } return semver.satisfies(child.package.version, requested.gitRange, { loose: true, }) } default: // unpossible, just being cautious break } const er = new Error('Unsupported dependency type') er.dependency = child.name er.requested = requested requestor.errors.push(er) return false } const linkValid = (child, requested, requestor) => 
{ const isLink = !!child.isLink // if we're installing links and the node is a link, then it's invalid because we want // a real node to be there. Except for workspaces. They are always links. if (requestor.installLinks && !child.isWorkspace) { return !isLink } // directory must be a link to the specified folder return isLink && relative(child.realpath, requested.fetchSpec) === '' } const tarballValid = (child, requested) => { if (child.isLink) { return false } if (child.resolved) { return child.resolved.replace(/\\/g, '/') === `file:${requested.fetchSpec.replace(/\\/g, '/')}` } // if we have a legacy mutated package.json file. we can't be 100% // sure that it resolved to the same file, but if it was the same // request, that's a pretty good indicator of sameness. if (child.package._requested) { return child.package._requested.saveSpec === requested.saveSpec } // ok, we're probably dealing with some legacy cruft here, not much // we can do at this point unfortunately. return false } module.exports = (child, requested, accept, requestor) => depValid(child, requested, requestor) || (typeof accept === 'string' ? depValid(child, accept, requestor) : false) PK]�\"�XOOarborist/lib/place-dep.jsnu�[���// Given a dep, a node that depends on it, and the edge representing that // dependency, place the dep somewhere in the node's tree, and all of its // peer dependencies. // // Handles all of the tree updating needed to place the dep, including // removing replaced nodes, pruning now-extraneous or invalidated nodes, // and saves a set of what was placed and what needs re-evaluation as // a result. 
const localeCompare = require('@isaacs/string-locale-compare')('en') const { log } = require('proc-log') const { redact } = require('@npmcli/redact') const deepestNestingTarget = require('./deepest-nesting-target.js') const CanPlaceDep = require('./can-place-dep.js') const { KEEP, CONFLICT, } = CanPlaceDep const debug = require('./debug.js') const Link = require('./link.js') const gatherDepSet = require('./gather-dep-set.js') const peerEntrySets = require('./peer-entry-sets.js') class PlaceDep { constructor (options) { this.auditReport = options.auditReport this.dep = options.dep this.edge = options.edge this.explicitRequest = options.explicitRequest this.force = options.force this.installLinks = options.installLinks this.installStrategy = options.installStrategy this.legacyPeerDeps = options.legacyPeerDeps this.parent = options.parent || null this.preferDedupe = options.preferDedupe this.strictPeerDeps = options.strictPeerDeps this.updateNames = options.updateNames this.canPlace = null this.canPlaceSelf = null // XXX this only appears to be used by tests this.checks = new Map() this.children = [] this.needEvaluation = new Set() this.peerConflict = null this.placed = null this.target = null this.current = this.edge.to this.name = this.edge.name this.top = this.parent?.top || this // nothing to do if the edge is fine as it is if (this.edge.to && !this.edge.error && !this.explicitRequest && !this.updateNames.includes(this.edge.name) && !this.auditReport?.isVulnerable(this.edge.to)) { return } // walk up the tree until we hit either a top/root node, or a place // where the dep is not a peer dep. const start = this.getStartNode() for (const target of start.ancestry()) { // if the current location has a peerDep on it, then we can't place here // this is pretty rare to hit, since we always prefer deduping peers, // and the getStartNode will start us out above any peers from the // thing that depends on it. 
but we could hit it with something like: // // a -> (b@1, c@1) // +-- c@1 // +-- b -> PEEROPTIONAL(v) (c@2) // +-- c@2 -> (v) // // So we check if we can place v under c@2, that's fine. // Then we check under b, and can't, because of the optional peer dep. // but we CAN place it under a, so the correct thing to do is keep // walking up the tree. const targetEdge = target.edgesOut.get(this.edge.name) if (!target.isTop && targetEdge && targetEdge.peer) { continue } const cpd = new CanPlaceDep({ dep: this.dep, edge: this.edge, // note: this sets the parent's canPlace as the parent of this // canPlace, but it does NOT add this canPlace to the parent's // children. This way, we can know that it's a peer dep, and // get the top edge easily, while still maintaining the // tree of checks that factored into the original decision. parent: this.parent && this.parent.canPlace, target, preferDedupe: this.preferDedupe, explicitRequest: this.explicitRequest, }) this.checks.set(target, cpd) // It's possible that a "conflict" is a conflict among the *peers* of // a given node we're trying to place, but there actually is no current // node. Eg, // root -> (a, b) // a -> PEER(c) // b -> PEER(d) // d -> PEER(c@2) // We place (a), and get a peer of (c) along with it. // then we try to place (b), and get CONFLICT in the check, because // of the conflicting peer from (b)->(d)->(c@2). In that case, we // should treat (b) and (d) as OK, and place them in the last place // where they did not themselves conflict, and skip c@2 if conflict // is ok by virtue of being forced or not ours and not strict. if (cpd.canPlaceSelf !== CONFLICT) { this.canPlaceSelf = cpd } // we found a place this can go, along with all its peer friends. // we break when we get the first conflict if (cpd.canPlace !== CONFLICT) { this.canPlace = cpd } else { break } // if it's a load failure, just plop it in the first place attempted, // since we're going to crash the build or prune it out anyway. 
// but, this will frequently NOT be a successful canPlace, because // it'll have no version or other information. if (this.dep.errors.length) { break } // nest packages like npm v1 and v2 // very disk-inefficient if (this.installStrategy === 'nested') { break } // when installing globally, or just in global style, we never place // deps above the first level. if (this.installStrategy === 'shallow') { const rp = target.resolveParent if (rp && rp.isProjectRoot) { break } } } // if we can't find a target, that means that the last place checked, // and all the places before it, had a conflict. if (!this.canPlace) { // if not forced, and it's our dep, or strictPeerDeps is set, then // this is an ERESOLVE error. if (!this.force && (this.isMine || this.strictPeerDeps)) { return this.failPeerConflict() } // ok! we're gonna allow the conflict, but we should still warn // if we have a current, then we treat CONFLICT as a KEEP. // otherwise, we just skip it. Only warn on the one that actually // could not be placed somewhere. if (!this.canPlaceSelf) { this.warnPeerConflict() return } this.canPlace = this.canPlaceSelf } // now we have a target, a tree of CanPlaceDep results for the peer group, // and we are ready to go /* istanbul ignore next */ if (!this.canPlace) { debug(() => { throw new Error('canPlace not set, but trying to place in tree') }) return } const { target } = this.canPlace log.silly( 'placeDep', target.location || 'ROOT', `${this.dep.name}@${this.dep.version}`, this.canPlace.description, `for: ${this.edge.from.package._id || this.edge.from.location}`, `want: ${redact(this.edge.spec || '*')}` ) const placementType = this.canPlace.canPlace === CONFLICT ? this.canPlace.canPlaceSelf : this.canPlace.canPlace // if we're placing in the tree with --force, we can get here even though // it's a conflict. Treat it as a KEEP, but warn and move on. 
if (placementType === KEEP) { // this was a peerConflicted peer dep if (this.edge.peer && !this.edge.valid) { this.warnPeerConflict() } // if we get a KEEP in a update scenario, then we MAY have something // already duplicating this unnecessarily! For example: // ``` // root (dep: y@1) // +-- x (dep: y@1.1) // | +-- y@1.1.0 (replacing with 1.1.2, got KEEP at the root) // +-- y@1.1.2 (updated already from 1.0.0) // ``` // Now say we do `reify({update:['y']})`, and the latest version is // 1.1.2, which we now have in the root. We'll try to place y@1.1.2 // first in x, then in the root, ending with KEEP, because we already // have it. In that case, we ought to REMOVE the nm/x/nm/y node, because // it is an unnecessary duplicate. this.pruneDedupable(target) return } // we were told to place it here in the target, so either it does not // already exist in the tree, OR it's shadowed. // handle otherwise unresolvable dependency nesting loops by // creating a symbolic link // a1 -> b1 -> a2 -> b2 -> a1 -> ... // instead of nesting forever, when the loop occurs, create // a symbolic link to the earlier instance for (let p = target; p; p = p.resolveParent) { if (p.matches(this.dep) && !p.isTop) { this.placed = new Link({ parent: target, target: p }) return } } // XXX if we are replacing SOME of a peer entry group, we will need to // remove any that are not being replaced and will now be invalid, and // re-evaluate them deeper into the tree. const virtualRoot = this.dep.parent this.placed = new this.dep.constructor({ name: this.dep.name, pkg: this.dep.package, resolved: this.dep.resolved, integrity: this.dep.integrity, installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, error: this.dep.errors[0], ...(this.dep.overrides ? { overrides: this.dep.overrides } : {}), ...(this.dep.isLink ? 
{ target: this.dep.target, realpath: this.dep.realpath } : {}), }) this.oldDep = target.children.get(this.name) if (this.oldDep) { this.replaceOldDep() } else { this.placed.parent = target } // if it's a peerConflicted peer dep, warn about it if (this.edge.peer && !this.placed.satisfies(this.edge)) { this.warnPeerConflict() } // If the edge is not an error, then we're updating something, and // MAY end up putting a better/identical node further up the tree in // a way that causes an unnecessary duplication. If so, remove the // now-unnecessary node. if (this.edge.valid && this.edge.to && this.edge.to !== this.placed) { this.pruneDedupable(this.edge.to, false) } // in case we just made some duplicates that can be removed, // prune anything deeper in the tree that can be replaced by this for (const node of target.root.inventory.query('name', this.name)) { if (node.isDescendantOf(target) && !node.isTop) { this.pruneDedupable(node, false) // only walk the direct children of the ones we kept if (node.root === target.root) { for (const kid of node.children.values()) { this.pruneDedupable(kid, false) } } } } // also place its unmet or invalid peer deps at this location // loop through any peer deps from the thing we just placed, and place // those ones as well. it's safe to do this with the virtual nodes, // because we're copying rather than moving them out of the virtual root, // otherwise they'd be gone and the peer set would change throughout // this loop. for (const peerEdge of this.placed.edgesOut.values()) { if (peerEdge.valid || !peerEdge.peer || peerEdge.peerConflicted) { continue } const peer = virtualRoot.children.get(peerEdge.name) // Note: if the virtualRoot *doesn't* have the peer, then that means // it's an optional peer dep. If it's not being properly met (ie, // peerEdge.valid is false), then this is likely heading for an // ERESOLVE error, unless it can walk further up the tree. 
if (!peer) { continue } // peerConflicted peerEdge, just accept what's there already if (!peer.satisfies(peerEdge)) { continue } this.children.push(new PlaceDep({ auditReport: this.auditReport, explicitRequest: this.explicitRequest, force: this.force, installLinks: this.installLinks, installStrategy: this.installStrategy, legacyPeerDeps: this.legaycPeerDeps, preferDedupe: this.preferDedupe, strictPeerDeps: this.strictPeerDeps, updateNames: this.updateName, parent: this, dep: peer, node: this.placed, edge: peerEdge, })) } } replaceOldDep () { const target = this.oldDep.parent // XXX handle replacing an entire peer group? // what about cases where we need to push some other peer groups deeper // into the tree? all the tree updating should be done here, and track // all the things that we add and remove, so that we can know what // to re-evaluate. // if we're replacing, we should also remove any nodes for edges that // are now invalid, and where this (or its deps) is the only dependent, // and also recurse on that pruning. Otherwise leaving that dep node // around can result in spurious conflicts pushing nodes deeper into // the tree than needed in the case of cycles that will be removed // later anyway. const oldDeps = [] for (const [name, edge] of this.oldDep.edgesOut.entries()) { if (!this.placed.edgesOut.has(name) && edge.to) { oldDeps.push(...gatherDepSet([edge.to], e => e.to !== edge.to)) } } // gather all peer edgesIn which are at this level, and will not be // satisfied by the new dependency. Those are the peer sets that need // to be either warned about (if they cannot go deeper), or removed and // re-placed (if they can). 
const prunePeerSets = [] for (const edge of this.oldDep.edgesIn) { if (this.placed.satisfies(edge) || !edge.peer || edge.from.parent !== target || edge.peerConflicted) { // not a peer dep, not invalid, or not from this level, so it's fine // to just let it re-evaluate as a problemEdge later, or let it be // satisfied by the new dep being placed. continue } for (const entryEdge of peerEntrySets(edge.from).keys()) { // either this one needs to be pruned and re-evaluated, or marked // as peerConflicted and warned about. If the entryEdge comes in from // the root or a workspace, then we have to leave it alone, and in that // case, it will have already warned or crashed by getting to this point const entryNode = entryEdge.to const deepestTarget = deepestNestingTarget(entryNode) if (deepestTarget !== target && !(entryEdge.from.isProjectRoot || entryEdge.from.isWorkspace)) { prunePeerSets.push(...gatherDepSet([entryNode], e => { return e.to !== entryNode && !e.peerConflicted })) } else { this.warnPeerConflict(edge, this.dep) } } } this.placed.replace(this.oldDep) this.pruneForReplacement(this.placed, oldDeps) for (const dep of prunePeerSets) { for (const edge of dep.edgesIn) { this.needEvaluation.add(edge.from) } dep.root = null } } pruneForReplacement (node, oldDeps) { // gather up all the now-invalid/extraneous edgesOut, as long as they are // only depended upon by the old node/deps const invalidDeps = new Set([...node.edgesOut.values()] .filter(e => e.to && !e.valid).map(e => e.to)) for (const dep of oldDeps) { const set = gatherDepSet([dep], e => e.to !== dep && e.valid) for (const dep of set) { invalidDeps.add(dep) } } // ignore dependency edges from the node being replaced, but // otherwise filter the set down to just the set with no // dependencies from outside the set, except the node in question. 
const deps = gatherDepSet(invalidDeps, edge => edge.from !== node && edge.to !== node && edge.valid) // now just delete whatever's left, because it's junk for (const dep of deps) { dep.root = null } } // prune all the nodes in a branch of the tree that can be safely removed // This is only the most basic duplication detection; it finds if there // is another satisfying node further up the tree, and if so, dedupes. // Even in installStategy is nested, we do this amount of deduplication. pruneDedupable (node, descend = true) { if (node.canDedupe(this.preferDedupe)) { // gather up all deps that have no valid edges in from outside // the dep set, except for this node we're deduping, so that we // also prune deps that would be made extraneous. const deps = gatherDepSet([node], e => e.to !== node && e.valid) for (const node of deps) { node.root = null } return } if (descend) { // sort these so that they're deterministically ordered // otherwise, resulting tree shape is dependent on the order // in which they happened to be resolved. const nodeSort = (a, b) => localeCompare(a.location, b.location) const children = [...node.children.values()].sort(nodeSort) for (const child of children) { this.pruneDedupable(child) } const fsChildren = [...node.fsChildren].sort(nodeSort) for (const topNode of fsChildren) { const children = [...topNode.children.values()].sort(nodeSort) for (const child of children) { this.pruneDedupable(child) } } } } get isMine () { const { edge } = this.top const { from: node } = edge if (node.isWorkspace || node.isProjectRoot) { return true } if (!edge.peer) { return false } // re-entry case. check if any non-peer edges come from the project, // or any entryEdges on peer groups are from the root. 
let hasPeerEdges = false for (const edge of node.edgesIn) { if (edge.peer) { hasPeerEdges = true continue } if (edge.from.isWorkspace || edge.from.isProjectRoot) { return true } } if (hasPeerEdges) { for (const edge of peerEntrySets(node).keys()) { if (edge.from.isWorkspace || edge.from.isProjectRoot) { return true } } } return false } warnPeerConflict (edge, dep) { edge = edge || this.edge dep = dep || this.dep edge.peerConflicted = true const expl = this.explainPeerConflict(edge, dep) log.warn('ERESOLVE', 'overriding peer dependency', expl) } failPeerConflict (edge, dep) { edge = edge || this.top.edge dep = dep || this.top.dep const expl = this.explainPeerConflict(edge, dep) throw Object.assign(new Error('could not resolve'), expl) } explainPeerConflict (edge, dep) { const { from: node } = edge const curNode = node.resolve(edge.name) // XXX decorate more with this.canPlace and this.canPlaceSelf, // this.checks, this.children, walk over conflicted peers, etc. const expl = { code: 'ERESOLVE', edge: edge.explain(), dep: dep.explain(edge), force: this.force, isMine: this.isMine, strictPeerDeps: this.strictPeerDeps, } if (this.parent) { // this is the conflicted peer expl.current = curNode && curNode.explain(edge) expl.peerConflict = this.current && this.current.explain(this.edge) } else { expl.current = curNode && curNode.explain() if (this.canPlaceSelf && this.canPlaceSelf.canPlaceSelf !== CONFLICT) { // failed while checking for a child dep const cps = this.canPlaceSelf for (const peer of cps.conflictChildren) { if (peer.current) { expl.peerConflict = { current: peer.current.explain(), peer: peer.dep.explain(peer.edge), } break } } } else { expl.peerConflict = { current: this.current && this.current.explain(), peer: this.dep.explain(this.edge), } } } return expl } getStartNode () { // if we are a peer, then we MUST be at least as shallow as the peer // dependent const from = this.parent?.getStartNode() || this.edge.from return deepestNestingTarget(from, this.name) 
} // XXX this only appears to be used by tests get allChildren () { const set = new Set(this.children) for (const child of set) { for (const grandchild of child.children) { set.add(grandchild) } } return [...set] } } module.exports = PlaceDep PK]�\��N��arborist/lib/tracker.jsnu�[���const proggy = require('proggy') module.exports = cls => class Tracker extends cls { #progress = new Map() #createTracker (key, name) { const tracker = new proggy.Tracker(name ?? key) tracker.on('done', () => this.#progress.delete(key)) this.#progress.set(key, tracker) } addTracker (section, subsection = null, key = null) { if (section === null || section === undefined) { this.#onError(`Tracker can't be null or undefined`) } if (key === null) { key = subsection } const hasTracker = this.#progress.has(section) const hasSubtracker = this.#progress.has(`${section}:${key}`) if (hasTracker && subsection === null) { // 0. existing tracker, no subsection this.#onError(`Tracker "${section}" already exists`) } else if (!hasTracker && subsection === null) { // 1. no existing tracker, no subsection // Create a new progress tracker this.#createTracker(section) } else if (!hasTracker && subsection !== null) { // 2. no parent tracker and subsection this.#onError(`Parent tracker "${section}" does not exist`) } else if (!hasTracker || !hasSubtracker) { // 3. existing parent tracker, no subsection tracker // Create a new subtracker and update parents const parentTracker = this.#progress.get(section) parentTracker.update(parentTracker.value, parentTracker.total + 1) this.#createTracker(`${section}:${key}`, `${section}:${subsection}`) } // 4. 
existing parent tracker, existing subsection tracker // skip it } finishTracker (section, subsection = null, key = null) { if (section === null || section === undefined) { this.#onError(`Tracker can't be null or undefined`) } if (key === null) { key = subsection } const hasTracker = this.#progress.has(section) const hasSubtracker = this.#progress.has(`${section}:${key}`) // 0. parent tracker exists, no subsection // Finish parent tracker and remove from this.#progress if (hasTracker && subsection === null) { // check if parent tracker does // not have any remaining children const keys = this.#progress.keys() for (const key of keys) { if (key.match(new RegExp(section + ':'))) { this.finishTracker(section, key) } } // remove parent tracker this.#progress.get(section).finish() } else if (!hasTracker && subsection === null) { // 1. no existing parent tracker, no subsection this.#onError(`Tracker "${section}" does not exist`) } else if (!hasTracker || hasSubtracker) { // 2. subtracker exists // Finish subtracker and remove from this.#progress const parentTracker = this.#progress.get(section) parentTracker.update(parentTracker.value + 1) this.#progress.get(`${section}:${key}`).finish() } // 3. existing parent tracker, no subsection } #onError (msg) { throw new Error(msg) } } PK]�\ւy��arborist/lib/add-rm-pkg-deps.jsnu�[���// add and remove dependency specs to/from pkg manifest const { log } = require('proc-log') const localeCompare = require('@isaacs/string-locale-compare')('en') const add = ({ pkg, add, saveBundle, saveType }) => { for (const { name, rawSpec } of add) { let addSaveType = saveType // if the user does not give us a type, we infer which type(s) // to keep based on the same order of priority we do when // building the tree as defined in the _loadDeps method of // the node class. 
if (!addSaveType) { addSaveType = inferSaveType(pkg, name) } if (addSaveType === 'prod') { // a production dependency can only exist as production (rpj ensures it // doesn't coexist w/ optional) deleteSubKey(pkg, 'devDependencies', name, 'dependencies') deleteSubKey(pkg, 'peerDependencies', name, 'dependencies') } else if (addSaveType === 'dev') { // a dev dependency may co-exist as peer, or optional, but not production deleteSubKey(pkg, 'dependencies', name, 'devDependencies') } else if (addSaveType === 'optional') { // an optional dependency may co-exist as dev (rpj ensures it doesn't // coexist w/ prod) deleteSubKey(pkg, 'peerDependencies', name, 'optionalDependencies') } else { // peer or peerOptional is all that's left // a peer dependency may coexist as dev deleteSubKey(pkg, 'dependencies', name, 'peerDependencies') deleteSubKey(pkg, 'optionalDependencies', name, 'peerDependencies') } const depType = saveTypeMap.get(addSaveType) pkg[depType] = pkg[depType] || {} if (rawSpec !== '*' || pkg[depType][name] === undefined) { pkg[depType][name] = rawSpec } if (addSaveType === 'optional') { // Affordance for previous npm versions that require this behaviour pkg.dependencies = pkg.dependencies || {} pkg.dependencies[name] = pkg.optionalDependencies[name] } if (addSaveType === 'peer' || addSaveType === 'peerOptional') { const pdm = pkg.peerDependenciesMeta || {} if (addSaveType === 'peer' && pdm[name] && pdm[name].optional) { pdm[name].optional = false } else if (addSaveType === 'peerOptional') { pdm[name] = pdm[name] || {} pdm[name].optional = true pkg.peerDependenciesMeta = pdm } // peerDeps are often also a devDep, so that they can be tested when // using package managers that don't auto-install peer deps if (pkg.devDependencies && pkg.devDependencies[name] !== undefined) { pkg.devDependencies[name] = pkg.peerDependencies[name] } } if (saveBundle && addSaveType !== 'peer' && addSaveType !== 'peerOptional') { // keep it sorted, keep it unique const bd = new 
Set(pkg.bundleDependencies || []) bd.add(name) pkg.bundleDependencies = [...bd].sort(localeCompare) } } return pkg } // Canonical source of both the map between saveType and where it correlates to // in the package, and the names of all our dependencies attributes const saveTypeMap = new Map([ ['dev', 'devDependencies'], ['optional', 'optionalDependencies'], ['prod', 'dependencies'], ['peerOptional', 'peerDependencies'], ['peer', 'peerDependencies'], ]) // Finds where the package is already in the spec and infers saveType from that const inferSaveType = (pkg, name) => { for (const saveType of saveTypeMap.keys()) { if (hasSubKey(pkg, saveTypeMap.get(saveType), name)) { if ( saveType === 'peerOptional' && (!hasSubKey(pkg, 'peerDependenciesMeta', name) || !pkg.peerDependenciesMeta[name].optional) ) { return 'peer' } return saveType } } return 'prod' } const hasSubKey = (pkg, depType, name) => { return pkg[depType] && Object.prototype.hasOwnProperty.call(pkg[depType], name) } // Removes a subkey and warns about it if it's being replaced const deleteSubKey = (pkg, depType, name, replacedBy) => { if (hasSubKey(pkg, depType, name)) { if (replacedBy) { log.warn('idealTree', `Removing ${depType}.${name} in favor of ${replacedBy}.${name}`) } delete pkg[depType][name] // clean up peerDepsMeta if we are removing something from peerDependencies if (depType === 'peerDependencies' && pkg.peerDependenciesMeta) { delete pkg.peerDependenciesMeta[name] if (!Object.keys(pkg.peerDependenciesMeta).length) { delete pkg.peerDependenciesMeta } } if (!Object.keys(pkg[depType]).length) { delete pkg[depType] } } } const rm = (pkg, rm) => { for (const depType of new Set(saveTypeMap.values())) { for (const name of rm) { deleteSubKey(pkg, depType, name) } } if (pkg.bundleDependencies) { pkg.bundleDependencies = pkg.bundleDependencies .filter(name => !rm.includes(name)) if (!pkg.bundleDependencies.length) { delete pkg.bundleDependencies } } return pkg } module.exports = { add, rm, saveTypeMap, 
hasSubKey } PK]�\ź�%%"arborist/lib/consistent-resolve.jsnu�[���// take a path and a resolved value, and turn it into a resolution from // the given new path. This is used with converting a package.json's // relative file: path into one suitable for a lockfile, or between // lockfiles, and for converting hosted git repos to a consistent url type. const npa = require('npm-package-arg') const relpath = require('./relpath.js') const consistentResolve = (resolved, fromPath, toPath, relPaths = false) => { if (!resolved) { return null } try { const hostedOpt = { noCommittish: false } const { fetchSpec, saveSpec, type, hosted, rawSpec, raw, } = npa(resolved, fromPath) if (type === 'file' || type === 'directory') { const cleanFetchSpec = fetchSpec.replace(/#/g, '%23') if (relPaths && toPath) { return `file:${relpath(toPath, cleanFetchSpec)}` } return `file:${cleanFetchSpec}` } if (hosted) { return `git+${hosted.auth ? hosted.https(hostedOpt) : hosted.sshurl(hostedOpt)}` } if (type === 'git') { return saveSpec } if (rawSpec === '*') { return raw } return rawSpec } catch (_) { // whatever we passed in was not acceptable to npa. // leave it 100% untouched. return resolved } } module.exports = consistentResolve PK]�\�b�Q��&arborist/lib/deepest-nesting-target.jsnu�[���// given a starting node, what is the *deepest* target where name could go? // This is not on the Node class for the simple reason that we sometimes // need to check the deepest *potential* target for a Node that is not yet // added to the tree where we are checking. 
const deepestNestingTarget = (start, name) => { for (const target of start.ancestry()) { // note: this will skip past the first target if edge is peer if (target.isProjectRoot || !target.resolveParent || target.globalTop) { return target } const targetEdge = target.edgesOut.get(name) if (!targetEdge || !targetEdge.peer) { return target } } } module.exports = deepestNestingTarget PK]�\:��\??arborist/lib/link.jsnu�[���const relpath = require('./relpath.js') const Node = require('./node.js') const _loadDeps = Symbol.for('Arborist.Node._loadDeps') const _target = Symbol.for('_target') const { dirname } = require('node:path') // defined by Node class const _delistFromMeta = Symbol.for('_delistFromMeta') const _refreshLocation = Symbol.for('_refreshLocation') class Link extends Node { constructor (options) { const { root, realpath, target, parent, fsParent, isStoreLink } = options if (!realpath && !(target && target.path)) { throw new TypeError('must provide realpath for Link node') } super({ ...options, realpath: realpath || target.path, root: root || (parent ? parent.root : fsParent ? fsParent.root : target ? target.root : null), }) this.isStoreLink = isStoreLink || false if (target) { this.target = target } else if (this.realpath === this.root.path) { this.target = this.root } else { this.target = new Node({ ...options, path: realpath, parent: null, fsParent: null, root: this.root, }) } } get version () { return this.target ? this.target.version : this.package.version || '' } get target () { return this[_target] } set target (target) { const current = this[_target] if (target === current) { return } if (!target) { if (current && current.linksIn) { current.linksIn.delete(this) } if (this.path) { this[_delistFromMeta]() this[_target] = null this.package = {} this[_refreshLocation]() } else { this[_target] = null } return } if (!this.path) { // temp node pending assignment to a tree // we know it's not in the inventory yet, because no path. 
if (target.path) { this.realpath = target.path } else { target.path = target.realpath = this.realpath } target.root = this.root this[_target] = target target.linksIn.add(this) this.package = target.package return } // have to refresh metadata, because either realpath or package // is very likely changing. this[_delistFromMeta]() this.package = target.package this.realpath = target.path this[_refreshLocation]() target.root = this.root } // a link always resolves to the relative path to its target get resolved () { // the path/realpath guard is there for the benefit of setting // these things in the "wrong" order return this.path && this.realpath ? `file:${relpath(dirname(this.path), this.realpath).replace(/#/g, '%23')}` : null } set resolved (r) {} // deps are resolved on the target, not the Link // so this is a no-op [_loadDeps] () {} // links can't have children, only their targets can // fix it to an empty list so that we can still call // things that iterate over them, just as a no-op get children () { return new Map() } set children (c) {} get isLink () { return true } } module.exports = Link PK]�\]}L_ _ arborist/lib/realpath.jsnu�[���// look up the realpath, but cache stats to minimize overhead // If the parent folder is in the realpath cache, then we just // lstat the child, since there's no need to do a full realpath // This is not a separate module, and is much simpler than Node's // built-in fs.realpath, because we only care about symbolic links, // so we can handle many fewer edge cases. 
const { lstat, readlink } = require('node:fs/promises') const { resolve, basename, dirname } = require('node:path') const realpathCached = (path, rpcache, stcache, depth) => { // just a safety against extremely deep eloops /* istanbul ignore next */ if (depth > 2000) { throw eloop(path) } path = resolve(path) if (rpcache.has(path)) { return Promise.resolve(rpcache.get(path)) } const dir = dirname(path) const base = basename(path) if (base && rpcache.has(dir)) { return realpathChild(dir, base, rpcache, stcache, depth) } // if it's the root, then we know it's real if (!base) { rpcache.set(dir, dir) return Promise.resolve(dir) } // the parent, what is that? // find out, and then come back. return realpathCached(dir, rpcache, stcache, depth + 1).then(() => realpathCached(path, rpcache, stcache, depth + 1)) } const lstatCached = (path, stcache) => { if (stcache.has(path)) { return Promise.resolve(stcache.get(path)) } const p = lstat(path).then(st => { stcache.set(path, st) return st }) stcache.set(path, p) return p } // This is a slight fib, as it doesn't actually occur during a stat syscall. // But file systems are giant piles of lies, so whatever. 
const eloop = path => Object.assign(new Error( `ELOOP: too many symbolic links encountered, stat '${path}'`), { errno: -62, syscall: 'stat', code: 'ELOOP', path: path, }) const realpathChild = (dir, base, rpcache, stcache, depth) => { const realdir = rpcache.get(dir) // that unpossible /* istanbul ignore next */ if (typeof realdir === 'undefined') { throw new Error('in realpathChild without parent being in realpath cache') } const realish = resolve(realdir, base) return lstatCached(realish, stcache).then(st => { if (!st.isSymbolicLink()) { rpcache.set(resolve(dir, base), realish) return realish } return readlink(realish).then(target => { const resolved = resolve(realdir, target) if (realish === resolved) { throw eloop(realish) } return realpathCached(resolved, rpcache, stcache, depth + 1) }).then(real => { rpcache.set(resolve(dir, base), real) return real }) }) } module.exports = realpathCached PK]�\���2�/�/arborist/lib/audit-report.jsnu�[���// an object representing the set of vulnerabilities in a tree /* eslint camelcase: "off" */ const localeCompare = require('@isaacs/string-locale-compare')('en') const npa = require('npm-package-arg') const pickManifest = require('npm-pick-manifest') const Vuln = require('./vuln.js') const Calculator = require('@npmcli/metavuln-calculator') const _getReport = Symbol('getReport') const _fixAvailable = Symbol('fixAvailable') const _checkTopNode = Symbol('checkTopNode') const _init = Symbol('init') const _omit = Symbol('omit') const { log, time } = require('proc-log') const fetch = require('npm-registry-fetch') class AuditReport extends Map { static load (tree, opts) { return new AuditReport(tree, opts).run() } get auditReportVersion () { return 2 } toJSON () { const obj = { auditReportVersion: this.auditReportVersion, vulnerabilities: {}, metadata: { vulnerabilities: { info: 0, low: 0, moderate: 0, high: 0, critical: 0, total: this.size, }, dependencies: { prod: 0, dev: 0, optional: 0, peer: 0, peerOptional: 0, total: 
this.tree.inventory.size - 1, }, }, } for (const node of this.tree.inventory.values()) { const { dependencies } = obj.metadata let prod = true for (const type of [ 'dev', 'optional', 'peer', 'peerOptional', ]) { if (node[type]) { dependencies[type]++ prod = false } } if (prod) { dependencies.prod++ } } // if it doesn't have any topVulns, then it's fixable with audit fix // for each topVuln, figure out if it's fixable with audit fix --force, // or if we have to just delete the thing, and if the fix --force will // require a semver major update. const vulnerabilities = [] for (const [name, vuln] of this.entries()) { vulnerabilities.push([name, vuln.toJSON()]) obj.metadata.vulnerabilities[vuln.severity]++ } obj.vulnerabilities = vulnerabilities .sort(([a], [b]) => localeCompare(a, b)) .reduce((set, [name, vuln]) => { set[name] = vuln return set }, {}) return obj } constructor (tree, opts = {}) { super() const { omit } = opts this[_omit] = new Set(omit || []) this.topVulns = new Map() this.calculator = new Calculator(opts) this.error = null this.options = opts this.tree = tree this.filterSet = opts.filterSet } async run () { this.report = await this[_getReport]() log.silly('audit report', this.report) if (this.report) { await this[_init]() } return this } isVulnerable (node) { const vuln = this.get(node.packageName) return !!(vuln && vuln.isVulnerable(node)) } async [_init] () { const timeEnd = time.start('auditReport:init') const promises = [] for (const [name, advisories] of Object.entries(this.report)) { for (const advisory of advisories) { promises.push(this.calculator.calculate(name, advisory)) } } // now the advisories are calculated with a set of versions // and the packument. turn them into our style of vuln objects // which also have the affected nodes, and also create entries // for all the metavulns that we find from dependents. 
const advisories = new Set(await Promise.all(promises)) const seen = new Set() for (const advisory of advisories) { const { name, range } = advisory const k = `${name}@${range}` const vuln = this.get(name) || new Vuln({ name, advisory }) if (this.has(name)) { vuln.addAdvisory(advisory) } super.set(name, vuln) // don't flag the exact same name/range more than once // adding multiple advisories with the same range is fine, but no // need to search for nodes we already would have added. if (!seen.has(k)) { const p = [] for (const node of this.tree.inventory.query('packageName', name)) { if (!shouldAudit(node, this[_omit], this.filterSet)) { continue } // if not vulnerable by this advisory, keep searching if (!advisory.testVersion(node.version)) { continue } // we will have loaded the source already if this is a metavuln if (advisory.type === 'metavuln') { vuln.addVia(this.get(advisory.dependency)) } // already marked this one, no need to do it again if (vuln.nodes.has(node)) { continue } // haven't marked this one yet. get its dependents. vuln.nodes.add(node) for (const { from: dep, spec } of node.edgesIn) { if (dep.isTop && !vuln.topNodes.has(dep)) { this[_checkTopNode](dep, vuln, spec) } else { // calculate a metavuln, if necessary const calc = this.calculator.calculate(dep.packageName, advisory) // eslint-disable-next-line promise/always-return p.push(calc.then(meta => { // eslint-disable-next-line promise/always-return if (meta.testVersion(dep.version, spec)) { advisories.add(meta) } })) } } } await Promise.all(p) seen.add(k) } // make sure we actually got something. if not, remove it // this can happen if you are loading from a lockfile created by // npm v5, since it lists the current version of all deps, // rather than the range that is actually depended upon, // or if using --omit with the older audit endpoint. 
if (this.get(name).nodes.size === 0) { this.delete(name) continue } // if the vuln is valid, but THIS advisory doesn't apply to any of // the nodes it references, then remove it from the advisory list. // happens when using omit with old audit endpoint. for (const advisory of vuln.advisories) { const relevant = [...vuln.nodes] .some(n => advisory.testVersion(n.version)) if (!relevant) { vuln.deleteAdvisory(advisory) } } } timeEnd() } [_checkTopNode] (topNode, vuln, spec) { vuln.fixAvailable = this[_fixAvailable](topNode, vuln, spec) if (vuln.fixAvailable !== true) { // now we know the top node is vulnerable, and cannot be // upgraded out of the bad place without --force. But, there's // no need to add it to the actual vulns list, because nothing // depends on root. this.topVulns.set(vuln.name, vuln) vuln.topNodes.add(topNode) } } // check whether the top node is vulnerable. // check whether we can get out of the bad place with --force, and if // so, whether that update is SemVer Major [_fixAvailable] (topNode, vuln, spec) { // this will always be set to at least {name, versions:{}} const paku = vuln.packument if (!vuln.testSpec(spec)) { return true } // similarly, even if we HAVE a packument, but we're looking for it // somewhere other than the registry, and we got something vulnerable, // then we're stuck with it. const specObj = npa(spec) if (!specObj.registry) { return false } if (specObj.subSpec) { spec = specObj.subSpec.rawSpec } // We don't provide fixes for top nodes other than root, but we // still check to see if the node is fixable with a different version, // and if that is a semver major bump. 
try { const { _isSemVerMajor: isSemVerMajor, version, name, } = pickManifest(paku, spec, { ...this.options, before: null, avoid: vuln.range, avoidStrict: true, }) return { name, version, isSemVerMajor } } catch (er) { return false } } set () { throw new Error('do not call AuditReport.set() directly') } // convert a quick-audit into a bulk advisory listing static auditToBulk (report) { if (!report.advisories) { // tack on the report json where the response body would go throw Object.assign(new Error('Invalid advisory report'), { body: JSON.stringify(report), }) } const bulk = {} const { advisories } = report for (const advisory of Object.values(advisories)) { const { id, url, title, severity = 'high', vulnerable_versions = '*', module_name: name, } = advisory bulk[name] = bulk[name] || [] bulk[name].push({ id, url, title, severity, vulnerable_versions }) } return bulk } async [_getReport] () { // if we're not auditing, just return false if (this.options.audit === false || this.options.offline === true || this.tree.inventory.size === 1) { return null } const timeEnd = time.start('auditReport:getReport') try { try { // first try the super fast bulk advisory listing const body = prepareBulkData(this.tree, this[_omit], this.filterSet) log.silly('audit', 'bulk request', body) // no sense asking if we don't have anything to audit, // we know it'll be empty if (!Object.keys(body).length) { return null } const res = await fetch('/-/npm/v1/security/advisories/bulk', { ...this.options, registry: this.options.auditRegistry || this.options.registry, method: 'POST', gzip: true, body, }) return await res.json() } catch (er) { log.silly('audit', 'bulk request failed', String(er.body)) // that failed, try the quick audit endpoint const body = prepareData(this.tree, this.options) const res = await fetch('/-/npm/v1/security/audits/quick', { ...this.options, registry: this.options.auditRegistry || this.options.registry, method: 'POST', gzip: true, body, }) return 
AuditReport.auditToBulk(await res.json()) } } catch (er) { log.verbose('audit error', er) log.silly('audit error', String(er.body)) this.error = er return null } finally { timeEnd() } } } // return true if we should audit this one const shouldAudit = (node, omit, filterSet) => !node.version ? false : node.isRoot ? false : filterSet && filterSet.size !== 0 && !filterSet.has(node) ? false : omit.size === 0 ? true : !( // otherwise, just ensure we're not omitting this one node.dev && omit.has('dev') || node.optional && omit.has('optional') || node.devOptional && omit.has('dev') && omit.has('optional') || node.peer && omit.has('peer') ) const prepareBulkData = (tree, omit, filterSet) => { const payload = {} for (const name of tree.inventory.query('packageName')) { const set = new Set() for (const node of tree.inventory.query('packageName', name)) { if (!shouldAudit(node, omit, filterSet)) { continue } set.add(node.version) } if (set.size) { payload[name] = [...set] } } return payload } const prepareData = (tree, opts) => { const { npmVersion: npm_version } = opts const node_version = process.version const { platform, arch } = process const { NODE_ENV: node_env } = process.env const data = tree.meta.commit() // the legacy audit endpoint doesn't support any kind of pre-filtering // we just have to get the advisories and skip over them in the report return { name: data.name, version: data.version, requires: { ...(tree.package.devDependencies || {}), ...(tree.package.peerDependencies || {}), ...(tree.package.optionalDependencies || {}), ...(tree.package.dependencies || {}), }, dependencies: data.dependencies, metadata: { node_version, npm_version, platform, arch, node_env, }, } } module.exports = AuditReport PK]�\��).:.:$arborist/lib/arborist/load-actual.jsnu�[���// mix-in implementing the loadActual method const { relative, dirname, resolve, join, normalize } = require('node:path') const rpj = require('read-package-json-fast') const { readdirScoped } = 
require('@npmcli/fs') const { walkUp } = require('walk-up-path') const ancestorPath = require('common-ancestor-path') const treeCheck = require('../tree-check.js') const Shrinkwrap = require('../shrinkwrap.js') const calcDepFlags = require('../calc-dep-flags.js') const Node = require('../node.js') const Link = require('../link.js') const realpath = require('../realpath.js') // public symbols const _changePath = Symbol.for('_changePath') const _setWorkspaces = Symbol.for('setWorkspaces') const _rpcache = Symbol.for('realpathCache') const _stcache = Symbol.for('statCache') module.exports = cls => class ActualLoader extends cls { #actualTree // ensure when walking the tree that we don't call loadTree on the same // actual node more than one time. #actualTreeLoaded = new Set() #actualTreePromise // cache of nodes when loading the actualTree, so that we avoid loaded the // same node multiple times when symlinks attack. #cache = new Map() #filter // cache of link targets for setting fsParent links // We don't do fsParent as a magic getter/setter, because it'd be too costly // to keep up to date along the walk. // And, we know that it can ONLY be relevant when the node is a target of a // link, otherwise it'd be in a node_modules folder, so take advantage of // that to limit the scans later. #topNodes = new Set() #transplantFilter constructor (options) { super(options) // the tree of nodes on disk this.actualTree = options.actualTree // caches for cached realpath calls const cwd = process.cwd() // assume that the cwd is real enough for our purposes this[_rpcache] = new Map([[cwd, cwd]]) this[_stcache] = new Map() } // public method // TODO remove options param in next semver major async loadActual (options = {}) { // In the past this.actualTree was set as a promise that eventually // resolved, and overwrite this.actualTree with the resolved value. This // was a problem because virtually no other code expects this.actualTree to // be a promise. 
Instead we only set it once resolved, and also return it // from the promise so that it is what's returned from this function when // awaited. if (this.actualTree) { return this.actualTree } if (!this.#actualTreePromise) { // allow the user to set options on the ctor as well. // XXX: deprecate separate method options objects. options = { ...this.options, ...options } this.#actualTreePromise = this.#loadActual(options) .then(tree => { // reset all deps to extraneous prior to recalc if (!options.root) { for (const node of tree.inventory.values()) { node.extraneous = true } } // only reset root flags if we're not re-rooting, // otherwise leave as-is calcDepFlags(tree, !options.root) this.actualTree = treeCheck(tree) return this.actualTree }) } return this.#actualTreePromise } // return the promise so that we don't ever have more than one going at the // same time. This is so that buildIdealTree can default to the actualTree // if no shrinkwrap present, but reify() can still call buildIdealTree and // loadActual in parallel safely. 
async #loadActual (options) { // mostly realpath to throw if the root doesn't exist const { global, filter = () => true, root = null, transplantFilter = () => true, ignoreMissing = false, forceActual = false, } = options this.#filter = filter this.#transplantFilter = transplantFilter if (global) { const real = await realpath(this.path, this[_rpcache], this[_stcache]) const params = { path: this.path, realpath: real, pkg: {}, global, loadOverrides: true, } if (this.path === real) { this.#actualTree = this.#newNode(params) } else { this.#actualTree = await this.#newLink(params) } } else { // not in global mode, hidden lockfile is allowed, load root pkg too this.#actualTree = await this.#loadFSNode({ path: this.path, real: await realpath(this.path, this[_rpcache], this[_stcache]), loadOverrides: true, }) this.#actualTree.assertRootOverrides() // if forceActual is set, don't even try the hidden lockfile if (!forceActual) { // Note: hidden lockfile will be rejected if it's not the latest thing // in the folder, or if any of the entries in the hidden lockfile are // missing. const meta = await Shrinkwrap.load({ path: this.#actualTree.path, hiddenLockfile: true, resolveOptions: this.options, }) if (meta.loadedFromDisk) { this.#actualTree.meta = meta // have to load on a new Arborist object, so we don't assign // the virtualTree on this one! Also, the weird reference is because // we can't easily get a ref to Arborist in this module, without // creating a circular reference, since this class is a mixin used // to build up the Arborist class itself. 
await new this.constructor({ ...this.options }).loadVirtual({ root: this.#actualTree, }) await this[_setWorkspaces](this.#actualTree) this.#transplant(root) return this.#actualTree } } const meta = await Shrinkwrap.load({ path: this.#actualTree.path, lockfileVersion: this.options.lockfileVersion, resolveOptions: this.options, }) this.#actualTree.meta = meta } await this.#loadFSTree(this.#actualTree) await this[_setWorkspaces](this.#actualTree) // if there are workspace targets without Link nodes created, load // the targets, so that we know what they are. if (this.#actualTree.workspaces && this.#actualTree.workspaces.size) { const promises = [] for (const path of this.#actualTree.workspaces.values()) { if (!this.#cache.has(path)) { // workspace overrides use the root overrides const p = this.#loadFSNode({ path, root: this.#actualTree, useRootOverrides: true }) .then(node => this.#loadFSTree(node)) promises.push(p) } } await Promise.all(promises) } if (!ignoreMissing) { await this.#findMissingEdges() } // try to find a node that is the parent in a fs tree sense, but not a // node_modules tree sense, of any link targets. this allows us to // resolve deps that node will find, but a legacy npm view of the // world would not have noticed. for (const path of this.#topNodes) { const node = this.#cache.get(path) if (node && !node.parent && !node.fsParent) { for (const p of walkUp(dirname(path))) { if (this.#cache.has(p)) { node.fsParent = this.#cache.get(p) break } } } } this.#transplant(root) if (global) { // need to depend on the children, or else all of them // will end up being flagged as extraneous, since the // global root isn't a "real" project const tree = this.#actualTree const actualRoot = tree.isLink ? tree.target : tree const { dependencies = {} } = actualRoot.package for (const [name, kid] of actualRoot.children.entries()) { const def = kid.isLink ? 
`file:${kid.realpath.replace(/#/g, '%23')}` : '*' dependencies[name] = dependencies[name] || def } actualRoot.package = { ...actualRoot.package, dependencies } } return this.#actualTree } #transplant (root) { if (!root || root === this.#actualTree) { return } this.#actualTree[_changePath](root.path) for (const node of this.#actualTree.children.values()) { if (!this.#transplantFilter(node)) { node.root = null } } root.replace(this.#actualTree) for (const node of this.#actualTree.fsChildren) { node.root = this.#transplantFilter(node) ? root : null } this.#actualTree = root } async #loadFSNode ({ path, parent, real, root, loadOverrides, useRootOverrides }) { if (!real) { try { real = await realpath(path, this[_rpcache], this[_stcache]) } catch (error) { // if realpath fails, just provide a dummy error node return new Node({ error, path, realpath: path, parent, root, loadOverrides, }) } } const cached = this.#cache.get(path) let node // missing edges get a dummy node, assign the parent and return it if (cached && !cached.dummy) { cached.parent = parent return cached } else { const params = { installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, path, realpath: real, parent, root, loadOverrides, } try { const pkg = await rpj(join(real, 'package.json')) params.pkg = pkg if (useRootOverrides && root.overrides) { params.overrides = root.overrides.getNodeRule({ name: pkg.name, version: pkg.version }) } } catch (err) { params.error = err } // soldier on if read-package-json raises an error, passing it to the // Node which will attach it to its errors array (Link passes it along to // its target node) if (normalize(path) === real) { node = this.#newNode(params) } else { node = await this.#newLink(params) } } this.#cache.set(path, node) return node } #newNode (options) { // check it for an fsParent if it's a tree top. there's a decent chance // it'll get parented later, making the fsParent scan a no-op, but better // safe than sorry, since it's cheap. 
const { parent, realpath } = options if (!parent) { this.#topNodes.add(realpath) } return new Node(options) } async #newLink (options) { const { realpath } = options this.#topNodes.add(realpath) const target = this.#cache.get(realpath) const link = new Link({ ...options, target }) if (!target) { // Link set its target itself in this case this.#cache.set(realpath, link.target) // if a link target points at a node outside of the root tree's // node_modules hierarchy, then load that node as well. await this.#loadFSTree(link.target) } return link } async #loadFSTree (node) { const did = this.#actualTreeLoaded if (!node.isLink && !did.has(node.target.realpath)) { did.add(node.target.realpath) await this.#loadFSChildren(node.target) return Promise.all( [...node.target.children.entries()] .filter(([, kid]) => !did.has(kid.realpath)) .map(([, kid]) => this.#loadFSTree(kid)) ) } } // create child nodes for all the entries in node_modules // and attach them to the node as a parent async #loadFSChildren (node) { const nm = resolve(node.realpath, 'node_modules') try { const kids = await readdirScoped(nm).then(paths => paths.map(p => p.replace(/\\/g, '/'))) return Promise.all( // ignore . dirs and retired scoped package folders kids.filter(kid => !/^(@[^/]+\/)?\./.test(kid)) .filter(kid => this.#filter(node, kid)) .map(kid => this.#loadFSNode({ parent: node, path: resolve(nm, kid), }))) } catch { // error in the readdir is not fatal, just means no kids } } async #findMissingEdges () { // try to resolve any missing edges by walking up the directory tree, // checking for the package in each node_modules folder. stop at the // root directory. // The tricky move here is that we load a "dummy" node for the folder // containing the node_modules folder, so that it can be assigned as // the fsParent. It's a bad idea to *actually* load that full node, // because people sometimes develop in ~/projects/node_modules/... // so we'd end up loading a massive tree with lots of unrelated junk. 
const nmContents = new Map() const tree = this.#actualTree for (const node of tree.inventory.values()) { const ancestor = ancestorPath(node.realpath, this.path) const depPromises = [] for (const [name, edge] of node.edgesOut.entries()) { const notMissing = !edge.missing && !(edge.to && (edge.to.dummy || edge.to.parent !== node)) if (notMissing) { continue } // start the walk from the dirname, because we would have found // the dep in the loadFSTree step already if it was local. for (const p of walkUp(dirname(node.realpath))) { // only walk as far as the nearest ancestor // this keeps us from going into completely unrelated // places when a project is just missing something, but // allows for finding the transitive deps of link targets. // ie, if it has to go up and back out to get to the path // from the nearest common ancestor, we've gone too far. if (ancestor && /^\.\.(?:[\\/]|$)/.test(relative(ancestor, p))) { break } let entries if (!nmContents.has(p)) { entries = await readdirScoped(p + '/node_modules') .catch(() => []).then(paths => paths.map(p => p.replace(/\\/g, '/'))) nmContents.set(p, entries) } else { entries = nmContents.get(p) } if (!entries.includes(name)) { continue } let d if (!this.#cache.has(p)) { d = new Node({ path: p, root: node.root, dummy: true }) this.#cache.set(p, d) } else { d = this.#cache.get(p) } if (d.dummy) { // it's a placeholder, so likely would not have loaded this dep, // unless another dep in the tree also needs it. 
const depPath = normalize(`${p}/node_modules/${name}`) const cached = this.#cache.get(depPath) if (!cached || cached.dummy) { depPromises.push(this.#loadFSNode({ path: depPath, root: node.root, parent: d, }).then(node => this.#loadFSTree(node))) } } break } } await Promise.all(depPromises) } } } PK]�\=�?^<<)arborist/lib/arborist/isolated-reifier.jsnu�[���const _makeIdealGraph = Symbol('makeIdealGraph') const _createIsolatedTree = Symbol.for('createIsolatedTree') const _createBundledTree = Symbol('createBundledTree') const { mkdirSync } = require('node:fs') const pacote = require('pacote') const { join } = require('node:path') const { depth } = require('treeverse') const crypto = require('node:crypto') // cache complicated function results const memoize = (fn) => { const memo = new Map() return async function (arg) { const key = arg if (memo.has(key)) { return memo.get(key) } const result = {} memo.set(key, result) await fn(result, arg) return result } } module.exports = cls => class IsolatedReifier extends cls { /** * Create an ideal graph. * * An implementation of npm RFC-0042 * https://github.com/npm/rfcs/blob/main/accepted/0042-isolated-mode.md * * This entire file should be considered technical debt that will be resolved * with an Arborist refactor or rewrite. Embedded logic in Nodes and Links, * and the incremental state of building trees and reifying contains too many * assumptions to do a linked mode properly. * * Instead, this approach takes a tree built from build-ideal-tree, and * returns a new tree-like structure without the embedded logic of Node and * Link classes. * * Since the RFC requires leaving the package-lock in place, this approach * temporarily replaces the tree state for a couple of steps of reifying. * **/ async [_makeIdealGraph] (options) { /* Make sure that the ideal tree is build as the rest of * the algorithm depends on it. 
*/ const bitOpt = { ...options, complete: false, } await this.buildIdealTree(bitOpt) const idealTree = this.idealTree this.rootNode = {} const root = this.rootNode this.counter = 0 // memoize to cache generating proxy Nodes this.externalProxyMemo = memoize(this.externalProxy.bind(this)) this.workspaceProxyMemo = memoize(this.workspaceProxy.bind(this)) root.external = [] root.isProjectRoot = true root.localLocation = idealTree.location root.localPath = idealTree.path root.workspaces = await Promise.all( Array.from(idealTree.fsChildren.values(), this.workspaceProxyMemo)) const processed = new Set() const queue = [idealTree, ...idealTree.fsChildren] while (queue.length !== 0) { const next = queue.pop() if (processed.has(next.location)) { continue } processed.add(next.location) next.edgesOut.forEach(e => { if (!e.to || (next.package.bundleDependencies || next.package.bundledDependencies || []).includes(e.to.name)) { return } queue.push(e.to) }) if (!next.isProjectRoot && !next.isWorkspace) { root.external.push(await this.externalProxyMemo(next)) } } await this.assignCommonProperties(idealTree, root) this.idealGraph = root } async workspaceProxy (result, node) { result.localLocation = node.location result.localPath = node.path result.isWorkspace = true result.resolved = node.resolved await this.assignCommonProperties(node, result) } async externalProxy (result, node) { await this.assignCommonProperties(node, result) if (node.hasShrinkwrap) { const dir = join( node.root.path, 'node_modules', '.store', `${node.name}@${node.version}` ) mkdirSync(dir, { recursive: true }) // TODO this approach feels wrong // and shouldn't be necessary for shrinkwraps await pacote.extract(node.resolved, dir, { ...this.options, resolved: node.resolved, integrity: node.integrity, }) const Arborist = this.constructor const arb = new Arborist({ ...this.options, path: dir }) await arb[_makeIdealGraph]({ dev: false }) this.rootNode.external.push(...arb.idealGraph.external) 
arb.idealGraph.external.forEach(e => { e.root = this.rootNode e.id = `${node.id}=>${e.id}` }) result.localDependencies = [] result.externalDependencies = arb.idealGraph.externalDependencies result.externalOptionalDependencies = arb.idealGraph.externalOptionalDependencies result.dependencies = [ ...result.externalDependencies, ...result.localDependencies, ...result.externalOptionalDependencies, ] } result.optional = node.optional result.resolved = node.resolved result.version = node.version } async assignCommonProperties (node, result) { function validEdgesOut (node) { return [...node.edgesOut.values()].filter(e => e.to && e.to.target && !(node.package.bundledDepenedencies || node.package.bundleDependencies || []).includes(e.to.name)) } const edges = validEdgesOut(node) const optionalDeps = edges.filter(e => e.optional).map(e => e.to.target) const nonOptionalDeps = edges.filter(e => !e.optional).map(e => e.to.target) result.localDependencies = await Promise.all(nonOptionalDeps.filter(n => n.isWorkspace).map(this.workspaceProxyMemo)) result.externalDependencies = await Promise.all(nonOptionalDeps.filter(n => !n.isWorkspace).map(this.externalProxyMemo)) result.externalOptionalDependencies = await Promise.all(optionalDeps.map(this.externalProxyMemo)) result.dependencies = [ ...result.externalDependencies, ...result.localDependencies, ...result.externalOptionalDependencies, ] result.root = this.rootNode result.id = this.counter++ result.name = node.name result.package = { ...node.package } result.package.bundleDependencies = undefined result.hasInstallScript = node.hasInstallScript } async [_createBundledTree] () { // TODO: make sure that idealTree object exists const idealTree = this.idealTree // TODO: test workspaces having bundled deps const queue = [] for (const [, edge] of idealTree.edgesOut) { if (edge.to && (idealTree.package.bundleDependencies || idealTree.package.bundledDependencies || []).includes(edge.to.name)) { queue.push({ from: idealTree, to: edge.to }) } 
} for (const child of idealTree.fsChildren) { for (const [, edge] of child.edgesOut) { if (edge.to && (child.package.bundleDependencies || child.package.bundledDependencies || []).includes(edge.to.name)) { queue.push({ from: child, to: edge.to }) } } } const processed = new Set() const nodes = new Map() const edges = [] while (queue.length !== 0) { const nextEdge = queue.pop() const key = `${nextEdge.from.location}=>${nextEdge.to.location}` // should be impossible, unless bundled is duped /* istanbul ignore next */ if (processed.has(key)) { continue } processed.add(key) const from = nextEdge.from if (!from.isRoot && !from.isWorkspace) { nodes.set(from.location, { location: from.location, resolved: from.resolved, name: from.name, optional: from.optional, pkg: { ...from.package, bundleDependencies: undefined } }) } const to = nextEdge.to nodes.set(to.location, { location: to.location, resolved: to.resolved, name: to.name, optional: to.optional, pkg: { ...to.package, bundleDependencies: undefined } }) edges.push({ from: from.isRoot ? 
'root' : from.location, to: to.location }) to.edgesOut.forEach(e => { // an edge out should always have a to /* istanbul ignore else */ if (e.to) { queue.push({ from: e.from, to: e.to }) } }) } return { edges, nodes } } async [_createIsolatedTree] () { await this[_makeIdealGraph](this.options) const proxiedIdealTree = this.idealGraph const bundledTree = await this[_createBundledTree]() const treeHash = (startNode) => { // generate short hash based on the dependency tree // starting at this node const deps = [] const branch = [] depth({ tree: startNode, getChildren: node => node.dependencies, filter: node => node, visit: node => { branch.push(`${node.name}@${node.version}`) deps.push(`${branch.join('->')}::${node.resolved}`) }, leave: () => { branch.pop() }, }) deps.sort() return crypto.createHash('shake256', { outputLength: 16 }) .update(deps.join(',')) .digest('base64') // Node v14 doesn't support base64url .replace(/\+/g, '-') .replace(/\//g, '_') .replace(/=+$/m, '') } const getKey = (idealTreeNode) => { return `${idealTreeNode.name}@${idealTreeNode.version}-${treeHash(idealTreeNode)}` } const root = { fsChildren: [], integrity: null, inventory: new Map(), isLink: false, isRoot: true, binPaths: [], edgesIn: new Set(), edgesOut: new Map(), hasShrinkwrap: false, parent: null, // TODO: we should probably not reference this.idealTree resolved: this.idealTree.resolved, isTop: true, path: proxiedIdealTree.root.localPath, realpath: proxiedIdealTree.root.localPath, package: proxiedIdealTree.root.package, meta: { loadedFromDisk: false }, global: false, isProjectRoot: true, children: [], } // root.inventory.set('', t) // root.meta = this.idealTree.meta // TODO We should mock better the inventory object because it is used by audit-report.js ... 
maybe root.inventory.query = () => { return [] } const processed = new Set() proxiedIdealTree.workspaces.forEach(c => { const workspace = { edgesIn: new Set(), edgesOut: new Map(), children: [], hasInstallScript: c.hasInstallScript, binPaths: [], package: c.package, location: c.localLocation, path: c.localPath, realpath: c.localPath, resolved: c.resolved, } root.fsChildren.push(workspace) root.inventory.set(workspace.location, workspace) }) const generateChild = (node, location, pkg, inStore) => { const newChild = { global: false, globalTop: false, isProjectRoot: false, isTop: false, location, name: node.name, optional: node.optional, top: { path: proxiedIdealTree.root.localPath }, children: [], edgesIn: new Set(), edgesOut: new Map(), binPaths: [], fsChildren: [], /* istanbul ignore next -- emulate Node */ getBundler () { return null }, hasShrinkwrap: false, inDepBundle: false, integrity: null, isLink: false, isRoot: false, isInStore: inStore, path: join(proxiedIdealTree.root.localPath, location), realpath: join(proxiedIdealTree.root.localPath, location), resolved: node.resolved, version: pkg.version, package: pkg, } newChild.target = newChild root.children.push(newChild) root.inventory.set(newChild.location, newChild) } proxiedIdealTree.external.forEach(c => { const key = getKey(c) if (processed.has(key)) { return } processed.add(key) const location = join('node_modules', '.store', key, 'node_modules', c.name) generateChild(c, location, c.package, true) }) bundledTree.nodes.forEach(node => { generateChild(node, node.location, node.pkg, false) }) bundledTree.edges.forEach(e => { const from = e.from === 'root' ? 
root : root.inventory.get(e.from) const to = root.inventory.get(e.to) // Maybe optional should be propagated from the original edge const edge = { optional: false, from, to } from.edgesOut.set(to.name, edge) to.edgesIn.add(edge) }) const memo = new Set() function processEdges (node, externalEdge) { externalEdge = !!externalEdge const key = getKey(node) if (memo.has(key)) { return } memo.add(key) let from, nmFolder if (externalEdge) { const fromLocation = join('node_modules', '.store', key, 'node_modules', node.name) from = root.children.find(c => c.location === fromLocation) nmFolder = join('node_modules', '.store', key, 'node_modules') } else { from = node.isProjectRoot ? root : root.fsChildren.find(c => c.location === node.localLocation) nmFolder = join(node.localLocation, 'node_modules') } const processDeps = (dep, optional, external) => { optional = !!optional external = !!external const location = join(nmFolder, dep.name) const binNames = dep.package.bin && Object.keys(dep.package.bin) || [] const toKey = getKey(dep) let target if (external) { const toLocation = join('node_modules', '.store', toKey, 'node_modules', dep.name) target = root.children.find(c => c.location === toLocation) } else { target = root.fsChildren.find(c => c.location === dep.localLocation) } // TODO: we should no-op is an edge has already been created with the same fromKey and toKey binNames.forEach(bn => { target.binPaths.push(join(from.realpath, 'node_modules', '.bin', bn)) }) const link = { global: false, globalTop: false, isProjectRoot: false, edgesIn: new Set(), edgesOut: new Map(), binPaths: [], isTop: false, optional, location: location, path: join(dep.root.localPath, nmFolder, dep.name), realpath: target.path, name: toKey, resolved: dep.resolved, top: { path: dep.root.localPath }, children: [], fsChildren: [], isLink: true, isStoreLink: true, isRoot: false, package: { _id: 'abc', bundleDependencies: undefined, deprecated: undefined, bin: target.package.bin, scripts: 
dep.package.scripts }, target, } const newEdge1 = { optional, from, to: link } from.edgesOut.set(dep.name, newEdge1) link.edgesIn.add(newEdge1) const newEdge2 = { optional: false, from: link, to: target } link.edgesOut.set(dep.name, newEdge2) target.edgesIn.add(newEdge2) root.children.push(link) } for (const dep of node.localDependencies) { processEdges(dep, false) // nonOptional, local processDeps(dep, false, false) } for (const dep of node.externalDependencies) { processEdges(dep, true) // nonOptional, external processDeps(dep, false, true) } for (const dep of node.externalOptionalDependencies) { processEdges(dep, true) // optional, external processDeps(dep, true, true) } } processEdges(proxiedIdealTree, false) for (const node of proxiedIdealTree.workspaces) { processEdges(node, false) } root.children.forEach(c => c.parent = root) root.children.forEach(c => c.root = root) root.root = root root.target = root return root } } PK]�\���[Z.Z. arborist/lib/arborist/rebuild.jsnu�[���// Arborist.rebuild({path = this.path}) will do all the binlinks and // bundle building needed. Called by reify, and by `npm rebuild`. const localeCompare = require('@isaacs/string-locale-compare')('en') const { depth: dfwalk } = require('treeverse') const promiseAllRejectLate = require('promise-all-reject-late') const rpj = require('read-package-json-fast') const binLinks = require('bin-links') const runScript = require('@npmcli/run-script') const { callLimit: promiseCallLimit } = require('promise-call-limit') const { resolve } = require('node:path') const { isNodeGypPackage, defaultGypInstallScript } = require('@npmcli/node-gyp') const { log, time } = require('proc-log') const boolEnv = b => b ? 
'1' : '' const sortNodes = (a, b) => (a.depth - b.depth) || localeCompare(a.path, b.path) const _checkBins = Symbol.for('checkBins') // defined by reify mixin const _handleOptionalFailure = Symbol.for('handleOptionalFailure') const _trashList = Symbol.for('trashList') module.exports = cls => class Builder extends cls { #doHandleOptionalFailure #oldMeta = null #queues constructor (options) { super(options) this.scriptsRun = new Set() this.#resetQueues() } async rebuild ({ nodes, handleOptionalFailure = false } = {}) { // nothing to do if we're not building anything! if (this.options.ignoreScripts && !this.options.binLinks) { return } // when building for the first time, as part of reify, we ignore // failures in optional nodes, and just delete them. however, when // running JUST a rebuild, we treat optional failures as real fails this.#doHandleOptionalFailure = handleOptionalFailure if (!nodes) { nodes = await this.#loadDefaultNodes() } // separates links nodes so that it can run // prepare scripts and link bins in the expected order const timeEnd = time.start('build') const { depNodes, linkNodes, } = this.#retrieveNodesByType(nodes) // build regular deps await this.#build(depNodes, {}) // build link deps if (linkNodes.size) { this.#resetQueues() await this.#build(linkNodes, { type: 'links' }) } timeEnd() } // if we don't have a set of nodes, then just rebuild // the actual tree on disk. 
async #loadDefaultNodes () { let nodes const tree = await this.loadActual() let filterSet if (!this.options.workspacesEnabled) { filterSet = this.excludeWorkspacesDependencySet(tree) nodes = tree.inventory.filter(node => filterSet.has(node) || node.isProjectRoot ) } else if (this.options.workspaces.length) { filterSet = this.workspaceDependencySet( tree, this.options.workspaces, this.options.includeWorkspaceRoot ) nodes = tree.inventory.filter(node => filterSet.has(node)) } else { nodes = tree.inventory.values() } return nodes } #retrieveNodesByType (nodes) { const depNodes = new Set() const linkNodes = new Set() const storeNodes = new Set() for (const node of nodes) { if (node.isStoreLink) { storeNodes.add(node) } else if (node.isLink) { linkNodes.add(node) } else { depNodes.add(node) } } // Make sure that store linked nodes are processed last. // We can't process store links separately or else lifecycle scripts on // standard nodes might not have bin links yet. for (const node of storeNodes) { depNodes.add(node) } // deduplicates link nodes and their targets, avoids // calling lifecycle scripts twice when running `npm rebuild` // ref: https://github.com/npm/cli/issues/2905 // // we avoid doing so if global=true since `bin-links` relies // on having the target nodes available in global mode. 
if (!this.options.global) { for (const node of linkNodes) { depNodes.delete(node.target) } } return { depNodes, linkNodes, } } #resetQueues () { this.#queues = { preinstall: [], install: [], postinstall: [], prepare: [], bin: [], } } async #build (nodes, { type = 'deps' }) { const timeEnd = time.start(`build:${type}`) await this.#buildQueues(nodes) if (!this.options.ignoreScripts) { await this.#runScripts('preinstall') } // links should run prepare scripts and only link bins after that if (type === 'links') { await this.#runScripts('prepare') } if (this.options.binLinks) { await this.#linkAllBins() } if (!this.options.ignoreScripts) { await this.#runScripts('install') await this.#runScripts('postinstall') } timeEnd() } async #buildQueues (nodes) { const timeEnd = time.start('build:queue') const set = new Set() const promises = [] for (const node of nodes) { promises.push(this.#addToBuildSet(node, set)) // if it has bundle deps, add those too, if rebuildBundle if (this.options.rebuildBundle !== false) { const bd = node.package.bundleDependencies if (bd && bd.length) { dfwalk({ tree: node, leave: node => promises.push(this.#addToBuildSet(node, set)), getChildren: node => [...node.children.values()], filter: node => node.inBundle, }) } } } await promiseAllRejectLate(promises) // now sort into the queues for the 4 things we have to do // run in the same predictable order that buildIdealTree uses // there's no particular reason for doing it in this order rather // than another, but sorting *somehow* makes it consistent. 
const queue = [...set].sort(sortNodes) for (const node of queue) { const { package: { bin, scripts = {} } } = node.target const { preinstall, install, postinstall, prepare } = scripts const tests = { bin, preinstall, install, postinstall, prepare } for (const [key, has] of Object.entries(tests)) { if (has) { this.#queues[key].push(node) } } } timeEnd() } async [_checkBins] (node) { // if the node is a global top, and we're not in force mode, then // any existing bins need to either be missing, or a symlink into // the node path. Otherwise a package can have a preinstall script // that unlinks something, to allow them to silently overwrite system // binaries, which is unsafe and insecure. if (!node.globalTop || this.options.force) { return } const { path, package: pkg } = node await binLinks.checkBins({ pkg, path, top: true, global: true }) } async #addToBuildSet (node, set, refreshed = false) { if (set.has(node)) { return } if (this.#oldMeta === null) { const { root: { meta } } = node this.#oldMeta = meta && meta.loadedFromDisk && !(meta.originalLockfileVersion >= 2) } const { package: pkg, hasInstallScript } = node.target const { gypfile, bin, scripts = {} } = pkg const { preinstall, install, postinstall, prepare } = scripts const anyScript = preinstall || install || postinstall || prepare if (!refreshed && !anyScript && (hasInstallScript || this.#oldMeta)) { // we either have an old metadata (and thus might have scripts) // or we have an indication that there's install scripts (but // don't yet know what they are) so we have to load the package.json // from disk to see what the deal is. Failure here just means // no scripts to add, probably borked package.json. // add to the set then remove while we're reading the pj, so we // don't accidentally hit it multiple times. 
set.add(node) const pkg = await rpj(node.path + '/package.json').catch(() => ({})) set.delete(node) const { scripts = {} } = pkg node.package.scripts = scripts return this.#addToBuildSet(node, set, true) } // Rebuild node-gyp dependencies lacking an install or preinstall script // note that 'scripts' might be missing entirely, and the package may // set gypfile:false to avoid this automatic detection. const isGyp = gypfile !== false && !install && !preinstall && await isNodeGypPackage(node.path) if (bin || preinstall || install || postinstall || prepare || isGyp) { if (bin) { await this[_checkBins](node) } if (isGyp) { scripts.install = defaultGypInstallScript node.package.scripts = scripts } set.add(node) } } async #runScripts (event) { const queue = this.#queues[event] if (!queue.length) { return } const timeEnd = time.start(`build:run:${event}`) const stdio = this.options.foregroundScripts ? 'inherit' : 'pipe' const limit = this.options.foregroundScripts ? 1 : undefined await promiseCallLimit(queue.map(node => async () => { const { path, integrity, resolved, optional, peer, dev, devOptional, package: pkg, location, isStoreLink, } = node.target // skip any that we know we'll be deleting // or storeLinks if (this[_trashList].has(path) || isStoreLink) { return } const timeEndLocation = time.start(`build:run:${event}:${location}`) log.info('run', pkg._id, event, location, pkg.scripts[event]) const env = { npm_package_resolved: resolved, npm_package_integrity: integrity, npm_package_json: resolve(path, 'package.json'), npm_package_optional: boolEnv(optional), npm_package_dev: boolEnv(dev), npm_package_peer: boolEnv(peer), npm_package_dev_optional: boolEnv(devOptional && !dev && !optional), } const runOpts = { event, path, pkg, stdio, env, scriptShell: this.options.scriptShell, } const p = runScript(runOpts).catch(er => { const { code, signal } = er log.info('run', pkg._id, event, { code, signal }) throw er }).then(({ args, code, signal, stdout, stderr }) => { 
this.scriptsRun.add({ pkg, path, event, // I do not know why this needs to be on THIS line but refactoring // this function would be quite a process // eslint-disable-next-line promise/always-return cmd: args && args[args.length - 1], env, code, signal, stdout, stderr, }) log.info('run', pkg._id, event, { code, signal }) }) await (this.#doHandleOptionalFailure ? this[_handleOptionalFailure](node, p) : p) timeEndLocation() }), { limit }) timeEnd() } async #linkAllBins () { const queue = this.#queues.bin if (!queue.length) { return } const timeEnd = time.start('build:link') const promises = [] // sort the queue by node path, so that the module-local collision // detector in bin-links will always resolve the same way. for (const node of queue.sort(sortNodes)) { // TODO these run before they're awaited promises.push(this.#createBinLinks(node)) } await promiseAllRejectLate(promises) timeEnd() } async #createBinLinks (node) { if (this[_trashList].has(node.path)) { return } const timeEnd = time.start(`build:link:${node.location}`) const p = binLinks({ pkg: node.package, path: node.path, top: !!(node.isTop || node.globalTop), force: this.options.force, global: !!node.globalTop, }) await (this.#doHandleOptionalFailure ? this[_handleOptionalFailure](node, p) : p) timeEnd() } } PK]�\��X��"�"arborist/lib/arborist/index.jsnu�[���// The arborist manages three trees: // - actual // - virtual // - ideal // // The actual tree is what's present on disk in the node_modules tree // and elsewhere that links may extend. // // The virtual tree is loaded from metadata (package.json and lock files). // // The ideal tree is what we WANT that actual tree to become. This starts // with the virtual tree, and then applies the options requesting // add/remove/update actions. // // To reify a tree, we calculate a diff between the ideal and actual trees, // and then turn the actual tree into the ideal tree by taking the actions // required. 
At the end of the reification process, the actualTree is // updated to reflect the changes. // // Each tree has an Inventory at the root. Shrinkwrap is tracked by Arborist // instance. It always refers to the actual tree, but is updated (and written // to disk) on reification. // Each of the mixin "classes" adds functionality, but are not dependent on // constructor call order. So, we just load them in an array, and build up // the base class, so that the overall voltron class is easier to test and // cover, and separation of concerns can be maintained. const { resolve } = require('node:path') const { homedir } = require('node:os') const { depth } = require('treeverse') const mapWorkspaces = require('@npmcli/map-workspaces') const { log, time } = require('proc-log') const { saveTypeMap } = require('../add-rm-pkg-deps.js') const AuditReport = require('../audit-report.js') const relpath = require('../relpath.js') const PackumentCache = require('../packument-cache.js') const mixins = [ require('../tracker.js'), require('./build-ideal-tree.js'), require('./load-actual.js'), require('./load-virtual.js'), require('./rebuild.js'), require('./reify.js'), require('./isolated-reifier.js'), ] const _setWorkspaces = Symbol.for('setWorkspaces') const Base = mixins.reduce((a, b) => b(a), require('node:events')) // if it's 1, 2, or 3, set it explicitly that. // if undefined or null, set it null // otherwise, throw. const lockfileVersion = lfv => { if (lfv === 1 || lfv === 2 || lfv === 3) { return lfv } if (lfv === undefined || lfv === null) { return null } throw new TypeError('Invalid lockfileVersion config: ' + lfv) } class Arborist extends Base { constructor (options = {}) { const timeEnd = time.start('arborist:ctor') super(options) this.options = { nodeVersion: process.version, ...options, Arborist: this.constructor, binLinks: 'binLinks' in options ? 
!!options.binLinks : true, cache: options.cache || `${homedir()}/.npm/_cacache`, dryRun: !!options.dryRun, formatPackageLock: 'formatPackageLock' in options ? !!options.formatPackageLock : true, force: !!options.force, global: !!options.global, ignoreScripts: !!options.ignoreScripts, installStrategy: options.global ? 'shallow' : (options.installStrategy ? options.installStrategy : 'hoisted'), lockfileVersion: lockfileVersion(options.lockfileVersion), packageLockOnly: !!options.packageLockOnly, packumentCache: options.packumentCache || new PackumentCache(), path: options.path || '.', rebuildBundle: 'rebuildBundle' in options ? !!options.rebuildBundle : true, replaceRegistryHost: options.replaceRegistryHost, savePrefix: 'savePrefix' in options ? options.savePrefix : '^', scriptShell: options.scriptShell, workspaces: options.workspaces || [], workspacesEnabled: options.workspacesEnabled !== false, } // TODO we only ever look at this.options.replaceRegistryHost, not // this.replaceRegistryHost. Defaulting needs to be written back to // this.options to work properly this.replaceRegistryHost = this.options.replaceRegistryHost = (!this.options.replaceRegistryHost || this.options.replaceRegistryHost === 'npmjs') ? 'registry.npmjs.org' : this.options.replaceRegistryHost if (options.saveType && !saveTypeMap.get(options.saveType)) { throw new Error(`Invalid saveType ${options.saveType}`) } this.cache = resolve(this.options.cache) this.diff = null this.path = resolve(this.options.path) timeEnd() } // TODO: We should change these to static functions instead // of methods for the next major version // Get the actual nodes corresponding to a root node's child workspaces, // given a list of workspace names. 
workspaceNodes (tree, workspaces) { const wsMap = tree.workspaces if (!wsMap) { log.warn('workspaces', 'filter set, but no workspaces present') return [] } const nodes = [] for (const name of workspaces) { const path = wsMap.get(name) if (!path) { log.warn('workspaces', `${name} in filter set, but not in workspaces`) continue } const loc = relpath(tree.realpath, path) const node = tree.inventory.get(loc) if (!node) { log.warn('workspaces', `${name} in filter set, but no workspace folder present`) continue } nodes.push(node) } return nodes } // returns a set of workspace nodes and all their deps // TODO why is includeWorkspaceRoot a param? // TODO why is workspaces a param? workspaceDependencySet (tree, workspaces, includeWorkspaceRoot) { const wsNodes = this.workspaceNodes(tree, workspaces) if (includeWorkspaceRoot) { for (const edge of tree.edgesOut.values()) { if (edge.type !== 'workspace' && edge.to) { wsNodes.push(edge.to) } } } const wsDepSet = new Set(wsNodes) const extraneous = new Set() for (const node of wsDepSet) { for (const edge of node.edgesOut.values()) { const dep = edge.to if (dep) { wsDepSet.add(dep) if (dep.isLink) { wsDepSet.add(dep.target) } } } for (const child of node.children.values()) { if (child.extraneous) { extraneous.add(child) } } } for (const extra of extraneous) { wsDepSet.add(extra) } return wsDepSet } // returns a set of root dependencies, excluding dependencies that are // exclusively workspace dependencies excludeWorkspacesDependencySet (tree) { const rootDepSet = new Set() depth({ tree, visit: node => { for (const { to } of node.edgesOut.values()) { if (!to || to.isWorkspace) { continue } for (const edgeIn of to.edgesIn.values()) { if (edgeIn.from.isRoot || rootDepSet.has(edgeIn.from)) { rootDepSet.add(to) } } } return node }, filter: node => node, getChildren: (node, tree) => [...tree.edgesOut.values()].map(edge => edge.to), }) return rootDepSet } async [_setWorkspaces] (node) { const workspaces = await mapWorkspaces({ cwd: 
node.path, pkg: node.package, }) if (node && workspaces.size) { node.workspaces = workspaces } return node } async audit (options = {}) { this.addTracker('audit') if (this.options.global) { throw Object.assign( new Error('`npm audit` does not support testing globals'), { code: 'EAUDITGLOBAL' } ) } // allow the user to set options on the ctor as well. // XXX: deprecate separate method options objects. options = { ...this.options, ...options } const timeEnd = time.start('audit') let tree if (options.packageLock === false) { // build ideal tree await this.loadActual(options) await this.buildIdealTree() tree = this.idealTree } else { tree = await this.loadVirtual() } if (this.options.workspaces.length) { options.filterSet = this.workspaceDependencySet( tree, this.options.workspaces, this.options.includeWorkspaceRoot ) } if (!options.workspacesEnabled) { options.filterSet = this.excludeWorkspacesDependencySet(tree) } this.auditReport = await AuditReport.load(tree, options) const ret = options.fix ? this.reify(options) : this.auditReport timeEnd() this.finishTracker('audit') return ret } async dedupe (options = {}) { // allow the user to set options on the ctor as well. // XXX: deprecate separate method options objects. 
options = { ...this.options, ...options } const tree = await this.loadVirtual().catch(() => this.loadActual()) const names = [] for (const name of tree.inventory.query('name')) { if (tree.inventory.query('name', name).size > 1) { names.push(name) } } return this.reify({ ...options, preferDedupe: true, update: { names }, }) } } module.exports = Arborist PK]�\k�Ѽ~%~%%arborist/lib/arborist/load-virtual.jsnu�[���// mixin providing the loadVirtual method const mapWorkspaces = require('@npmcli/map-workspaces') const { resolve } = require('node:path') const nameFromFolder = require('@npmcli/name-from-folder') const consistentResolve = require('../consistent-resolve.js') const Shrinkwrap = require('../shrinkwrap.js') const Node = require('../node.js') const Link = require('../link.js') const relpath = require('../relpath.js') const calcDepFlags = require('../calc-dep-flags.js') const rpj = require('read-package-json-fast') const treeCheck = require('../tree-check.js') const flagsSuspect = Symbol.for('flagsSuspect') const setWorkspaces = Symbol.for('setWorkspaces') module.exports = cls => class VirtualLoader extends cls { #rootOptionProvided constructor (options) { super(options) // the virtual tree we load from a shrinkwrap this.virtualTree = options.virtualTree this[flagsSuspect] = false } // public method async loadVirtual (options = {}) { if (this.virtualTree) { return this.virtualTree } // allow the user to set reify options on the ctor as well. // XXX: deprecate separate reify() options object. 
options = { ...this.options, ...options } if (options.root && options.root.meta) { await this.#loadFromShrinkwrap(options.root.meta, options.root) return treeCheck(this.virtualTree) } const s = await Shrinkwrap.load({ path: this.path, lockfileVersion: this.options.lockfileVersion, resolveOptions: this.options, }) if (!s.loadedFromDisk && !options.root) { const er = new Error('loadVirtual requires existing shrinkwrap file') throw Object.assign(er, { code: 'ENOLOCK' }) } // when building the ideal tree, we pass in a root node to this function // otherwise, load it from the root package json or the lockfile const { root = await this.#loadRoot(s), } = options this.#rootOptionProvided = options.root await this.#loadFromShrinkwrap(s, root) root.assertRootOverrides() return treeCheck(this.virtualTree) } async #loadRoot (s) { const pj = this.path + '/package.json' const pkg = await rpj(pj).catch(() => s.data.packages['']) || {} return this[setWorkspaces](this.#loadNode('', pkg, true)) } async #loadFromShrinkwrap (s, root) { if (!this.#rootOptionProvided) { // root is never any of these things, but might be a brand new // baby Node object that never had its dep flags calculated. 
root.extraneous = false root.dev = false root.optional = false root.devOptional = false root.peer = false } else { this[flagsSuspect] = true } this.#checkRootEdges(s, root) root.meta = s this.virtualTree = root const { links, nodes } = this.#resolveNodes(s, root) await this.#resolveLinks(links, nodes) if (!(s.originalLockfileVersion >= 2)) { this.#assignBundles(nodes) } if (this[flagsSuspect]) { // reset all dep flags // can't use inventory here, because virtualTree might not be root for (const node of nodes.values()) { if (node.isRoot || node === this.#rootOptionProvided) { continue } node.extraneous = true node.dev = true node.optional = true node.devOptional = true node.peer = true } calcDepFlags(this.virtualTree, !this.#rootOptionProvided) } return root } // check the lockfile deps, and see if they match. if they do not // then we have to reset dep flags at the end. for example, if the // user manually edits their package.json file, then we need to know // that the idealTree is no longer entirely trustworthy. #checkRootEdges (s, root) { // loaded virtually from tree, no chance of being out of sync // ancient lockfiles are critically damaged by this process, // so we need to just hope for the best in those cases. 
if (!s.loadedFromDisk || s.ancientLockfile) { return } const lock = s.get('') const prod = lock.dependencies || {} const dev = lock.devDependencies || {} const optional = lock.optionalDependencies || {} const peer = lock.peerDependencies || {} const peerOptional = {} if (lock.peerDependenciesMeta) { for (const [name, meta] of Object.entries(lock.peerDependenciesMeta)) { if (meta.optional && peer[name] !== undefined) { peerOptional[name] = peer[name] delete peer[name] } } } for (const name of Object.keys(optional)) { delete prod[name] } const lockWS = {} const workspaces = mapWorkspaces.virtual({ cwd: this.path, lockfile: s.data, }) for (const [name, path] of workspaces.entries()) { lockWS[name] = `file:${path.replace(/#/g, '%23')}` } // Should rootNames exclude optional? const rootNames = new Set(root.edgesOut.keys()) const lockByType = ({ dev, optional, peer, peerOptional, prod, workspace: lockWS }) // Find anything in shrinkwrap deps that doesn't match root's type or spec for (const type in lockByType) { const deps = lockByType[type] for (const name in deps) { const edge = root.edgesOut.get(name) if (!edge || edge.type !== type || edge.spec !== deps[name]) { return this[flagsSuspect] = true } rootNames.delete(name) } } // Something was in root that's not accounted for in shrinkwrap if (rootNames.size) { return this[flagsSuspect] = true } } // separate out link metadatas, and create Node objects for nodes #resolveNodes (s, root) { const links = new Map() const nodes = new Map([['', root]]) for (const [location, meta] of Object.entries(s.data.packages)) { // skip the root because we already got it if (!location) { continue } if (meta.link) { links.set(location, meta) } else { nodes.set(location, this.#loadNode(location, meta)) } } return { links, nodes } } // links is the set of metadata, and nodes is the map of non-Link nodes // Set the targets to nodes in the set, if we have them (we might not) async #resolveLinks (links, nodes) { for (const [location, meta] of 
links.entries()) { const targetPath = resolve(this.path, meta.resolved) const targetLoc = relpath(this.path, targetPath) const target = nodes.get(targetLoc) const link = this.#loadLink(location, targetLoc, target, meta) nodes.set(location, link) nodes.set(targetLoc, link.target) // we always need to read the package.json for link targets // outside node_modules because they can be changed by the local user if (!link.target.parent) { const pj = link.realpath + '/package.json' const pkg = await rpj(pj).catch(() => null) if (pkg) { link.target.package = pkg } } } } #assignBundles (nodes) { for (const [location, node] of nodes) { // Skip assignment of parentage for the root package if (!location || node.isLink && !node.target.location) { continue } const { name, parent, package: { inBundle } } = node if (!parent) { continue } // read inBundle from package because 'package' here is // actually a v2 lockfile metadata entry. // If the *parent* is also bundled, though, or if the parent has // no dependency on it, then we assume that it's being pulled in // just by virtue of its parent or a transitive dep being bundled. const { package: ppkg } = parent const { inBundle: parentBundled } = ppkg if (inBundle && !parentBundled && parent.edgesOut.has(node.name)) { if (!ppkg.bundleDependencies) { ppkg.bundleDependencies = [name] } else { ppkg.bundleDependencies.push(name) } } } } #loadNode (location, sw, loadOverrides) { const p = this.virtualTree ? 
this.virtualTree.realpath : this.path const path = resolve(p, location) // shrinkwrap doesn't include package name unless necessary if (!sw.name) { sw.name = nameFromFolder(path) } const dev = sw.dev const optional = sw.optional const devOptional = dev || optional || sw.devOptional const peer = sw.peer const node = new Node({ installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, root: this.virtualTree, path, realpath: path, integrity: sw.integrity, resolved: consistentResolve(sw.resolved, this.path, path), pkg: sw, hasShrinkwrap: sw.hasShrinkwrap, dev, optional, devOptional, peer, loadOverrides, }) // cast to boolean because they're undefined in the lock file when false node.extraneous = !!sw.extraneous node.devOptional = !!(sw.devOptional || sw.dev || sw.optional) node.peer = !!sw.peer node.optional = !!sw.optional node.dev = !!sw.dev return node } #loadLink (location, targetLoc, target) { const path = resolve(this.path, location) const link = new Link({ installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, path, realpath: resolve(this.path, targetLoc), target, pkg: target && target.package, }) link.extraneous = target.extraneous link.devOptional = target.devOptional link.peer = target.peer link.optional = target.optional link.dev = target.dev return link } } PK]�\�?������arborist/lib/arborist/reify.jsnu�[���// mixin implementing the reify method const onExit = require('../signal-handling.js') const pacote = require('pacote') const AuditReport = require('../audit-report.js') const { subset, intersects } = require('semver') const npa = require('npm-package-arg') const semver = require('semver') const debug = require('../debug.js') const { walkUp } = require('walk-up-path') const { log, time } = require('proc-log') const hgi = require('hosted-git-info') const rpj = require('read-package-json-fast') const { dirname, resolve, relative, join } = require('node:path') const { depth: dfwalk } = require('treeverse') const { lstat, mkdir, rm, 
symlink, } = require('node:fs/promises') const { moveFile } = require('@npmcli/fs') const PackageJson = require('@npmcli/package-json') const packageContents = require('@npmcli/installed-package-contents') const runScript = require('@npmcli/run-script') const { checkEngine, checkPlatform } = require('npm-install-checks') const treeCheck = require('../tree-check.js') const relpath = require('../relpath.js') const Diff = require('../diff.js') const retirePath = require('../retire-path.js') const promiseAllRejectLate = require('promise-all-reject-late') const { callLimit: promiseCallLimit } = require('promise-call-limit') const optionalSet = require('../optional-set.js') const calcDepFlags = require('../calc-dep-flags.js') const { saveTypeMap, hasSubKey } = require('../add-rm-pkg-deps.js') const Shrinkwrap = require('../shrinkwrap.js') const { defaultLockfileVersion } = Shrinkwrap // Part of steps (steps need refactoring before we can do anything about these) const _retireShallowNodes = Symbol.for('retireShallowNodes') const _loadBundlesAndUpdateTrees = Symbol.for('loadBundlesAndUpdateTrees') const _submitQuickAudit = Symbol('submitQuickAudit') const _addOmitsToTrashList = Symbol('addOmitsToTrashList') const _unpackNewModules = Symbol.for('unpackNewModules') const _build = Symbol.for('build') // shared by rebuild mixin const _trashList = Symbol.for('trashList') const _handleOptionalFailure = Symbol.for('handleOptionalFailure') const _loadTrees = Symbol.for('loadTrees') // defined by rebuild mixin const _checkBins = Symbol.for('checkBins') // shared symbols for swapping out when testing // TODO tests should not be this deep into internals const _diffTrees = Symbol.for('diffTrees') const _createSparseTree = Symbol.for('createSparseTree') const _loadShrinkwrapsAndUpdateTrees = Symbol.for('loadShrinkwrapsAndUpdateTrees') const _reifyNode = Symbol.for('reifyNode') const _updateAll = Symbol.for('updateAll') const _updateNames = Symbol.for('updateNames') const _moveContents 
= Symbol.for('moveContents') const _moveBackRetiredUnchanged = Symbol.for('moveBackRetiredUnchanged') const _removeTrash = Symbol.for('removeTrash') const _renamePath = Symbol.for('renamePath') const _rollbackRetireShallowNodes = Symbol.for('rollbackRetireShallowNodes') const _rollbackCreateSparseTree = Symbol.for('rollbackCreateSparseTree') const _rollbackMoveBackRetiredUnchanged = Symbol.for('rollbackMoveBackRetiredUnchanged') const _saveIdealTree = Symbol.for('saveIdealTree') const _reifyPackages = Symbol.for('reifyPackages') // defined by build-ideal-tree mixin const _resolvedAdd = Symbol.for('resolvedAdd') const _usePackageLock = Symbol.for('usePackageLock') // used by build-ideal-tree mixin const _addNodeToTrashList = Symbol.for('addNodeToTrashList') const _createIsolatedTree = Symbol.for('createIsolatedTree') module.exports = cls => class Reifier extends cls { #bundleMissing = new Set() // child nodes we'd EXPECT to be included in a bundle, but aren't #bundleUnpacked = new Set() // the nodes we unpack to read their bundles #dryRun #nmValidated = new Set() #omitDev #omitPeer #omitOptional #retiredPaths = {} #retiredUnchanged = {} #savePrefix #shrinkwrapInflated = new Set() #sparseTreeDirs = new Set() #sparseTreeRoots = new Set() constructor (options) { super(options) this[_trashList] = new Set() } // public method async reify (options = {}) { const linked = (options.installStrategy || this.options.installStrategy) === 'linked' if (this.options.packageLockOnly && this.options.global) { const er = new Error('cannot generate lockfile for global packages') er.code = 'ESHRINKWRAPGLOBAL' throw er } const omit = new Set(options.omit || []) this.#omitDev = omit.has('dev') this.#omitOptional = omit.has('optional') this.#omitPeer = omit.has('peer') // start tracker block this.addTracker('reify') const timeEnd = time.start('reify') // don't create missing dirs on dry runs if (!this.options.packageLockOnly && !this.options.dryRun) { // we do NOT want to set ownership on 
this folder, especially // recursively, because it can have other side effects to do that // in a project directory. We just want to make it if it's missing. await mkdir(resolve(this.path), { recursive: true }) // do not allow the top-level node_modules to be a symlink await this.#validateNodeModules(resolve(this.path, 'node_modules')) } await this[_loadTrees](options) const oldTree = this.idealTree if (linked) { // swap out the tree with the isolated tree // this is currently technical debt which will be resolved in a refactor // of Node/Link trees log.warn('reify', 'The "linked" install strategy is EXPERIMENTAL and may contain bugs.') this.idealTree = await this[_createIsolatedTree]() } await this[_diffTrees]() await this[_reifyPackages]() if (linked) { // swap back in the idealTree // so that the lockfile is preserved this.idealTree = oldTree } await this[_saveIdealTree](options) // clean up any trash that is still in the tree for (const path of this[_trashList]) { const loc = relpath(this.idealTree.realpath, path) const node = this.idealTree.inventory.get(loc) if (node && node.root === this.idealTree) { node.parent = null } } // if we filtered to only certain nodes, then anything ELSE needs // to be untouched in the resulting actual tree, even if it differs // in the idealTree. Copy over anything that was in the actual and // was not changed, delete anything in the ideal and not actual. // Then we move the entire idealTree over to this.actualTree, and // save the hidden lockfile. if (this.diff && this.diff.filterSet.size) { const reroot = new Set() const { filterSet } = this.diff const seen = new Set() for (const [loc, ideal] of this.idealTree.inventory.entries()) { seen.add(loc) // if it's an ideal node from the filter set, then skip it // because we already made whatever changes were necessary if (filterSet.has(ideal)) { continue } // otherwise, if it's not in the actualTree, then it's not a thing // that we actually added. 
And if it IS in the actualTree, then // it's something that we left untouched, so we need to record // that. const actual = this.actualTree.inventory.get(loc) if (!actual) { ideal.root = null } else { if ([...actual.linksIn].some(link => filterSet.has(link))) { seen.add(actual.location) continue } const { realpath, isLink } = actual if (isLink && ideal.isLink && ideal.realpath === realpath) { continue } else { reroot.add(actual) } } } // now find any actual nodes that may not be present in the ideal // tree, but were left behind by virtue of not being in the filter for (const [loc, actual] of this.actualTree.inventory.entries()) { if (seen.has(loc)) { continue } seen.add(loc) // we know that this is something that ISN'T in the idealTree, // or else we will have addressed it in the previous loop. // If it's in the filterSet, that means we intentionally removed // it, so nothing to do here. if (filterSet.has(actual)) { continue } reroot.add(actual) } // go through the rerooted actual nodes, and move them over. for (const actual of reroot) { actual.root = this.idealTree } // prune out any tops that lack a linkIn, they are no longer relevant. for (const top of this.idealTree.tops) { if (top.linksIn.size === 0) { top.root = null } } // need to calculate dep flags, since nodes may have been marked // as extraneous or otherwise incorrect during transit. 
calcDepFlags(this.idealTree)
    }

    // save the ideal's meta as a hidden lockfile after we actualize it
    this.idealTree.meta.filename =
      this.idealTree.realpath + '/node_modules/.package-lock.json'
    this.idealTree.meta.hiddenLockfile = true
    this.idealTree.meta.lockfileVersion = defaultLockfileVersion

    // the ideal tree is now the on-disk reality: swap it into actualTree
    this.actualTree = this.idealTree
    this.idealTree = null

    if (!this.options.global) {
      await this.actualTree.meta.save()
      const ignoreScripts = !!this.options.ignoreScripts
      // if we aren't doing a dry run or ignoring scripts and we actually made changes to the dep
      // tree, then run the dependencies scripts
      if (!this.options.dryRun && !ignoreScripts && this.diff && this.diff.children.length) {
        const { path, package: pkg } = this.actualTree.target
        const stdio = this.options.foregroundScripts ? 'inherit' : 'pipe'
        const { scripts = {} } = pkg
        for (const event of ['predependencies', 'dependencies', 'postdependencies']) {
          if (Object.prototype.hasOwnProperty.call(scripts, event)) {
            log.info('run', pkg._id, event, scripts[event])
            await time.start(`reify:run:${event}`, () => runScript({
              event,
              path,
              pkg,
              stdio,
              scriptShell: this.options.scriptShell,
            }))
          }
        }
      }
    }

    // This is a very bad pattern and I can't wait to stop doing it
    this.auditReport = await this.auditReport

    this.finishTracker('reify')
    timeEnd()
    return treeCheck(this.actualTree)
  }

  // Run every reification step in order, rolling back (and rethrowing) on
  // failure or on an early termination signal.  Dry runs do nothing here;
  // packageLockOnly runs just the audit.
  async [_reifyPackages] () {
    // we don't submit the audit report or write to disk on dry runs
    if (this.options.dryRun) {
      return
    }

    if (this.options.packageLockOnly) {
      // we already have the complete tree, so just audit it now,
      // and that's all we have to do here.
      return this[_submitQuickAudit]()
    }

    // ok, we're about to start touching the fs.  need to roll back
    // if we get an early termination.
    let reifyTerminated = null
    const removeHandler = onExit(({ signal }) => {
      // only call once.  if signal hits twice, we just terminate
      removeHandler()
      reifyTerminated = Object.assign(new Error('process terminated'), {
        signal,
      })
      return false
    })

    // [rollbackfn, [...actions]]
    // after each step, if the process was terminated, execute the rollback
    // note that each rollback *also* calls the previous one when it's
    // finished, and then the first one throws the error, so we only need
    // a new rollback step when we have a new thing that must be done to
    // revert the install.
    const steps = [
      [_rollbackRetireShallowNodes, [
        _retireShallowNodes,
      ]],
      [_rollbackCreateSparseTree, [
        _createSparseTree,
        _addOmitsToTrashList,
        _loadShrinkwrapsAndUpdateTrees,
        _loadBundlesAndUpdateTrees,
        _submitQuickAudit,
        _unpackNewModules,
      ]],
      [_rollbackMoveBackRetiredUnchanged, [
        _moveBackRetiredUnchanged,
        _build,
      ]],
    ]
    for (const [rollback, actions] of steps) {
      for (const action of actions) {
        try {
          await this[action]()
          if (reifyTerminated) {
            throw reifyTerminated
          }
        } catch (er) {
          // TODO rollbacks shouldn't be relied on to throw err
          await this[rollback](er)
          /* istanbul ignore next - rollback throws, should never hit this */
          throw er
        }
      }
    }

    // no rollback for this one, just exit with the error, since the
    // install completed and can't be safely recovered at this point.
    await this[_removeTrash]()
    if (reifyTerminated) {
      throw reifyTerminated
    }

    // done modifying the file system, no need to keep listening for sigs
    removeHandler()
  }

  // when doing a local install, we load everything and figure it all out.
  // when doing a global install, we *only* care about the explicit requests.
  [_loadTrees] (options) {
    const timeEnd = time.start('reify:loadTrees')
    const bitOpt = {
      ...options,
      complete: this.options.packageLockOnly || this.options.dryRun,
    }

    // if we're only writing a package lock, then it doesn't matter what's here
    if (this.options.packageLockOnly) {
      return this.buildIdealTree(bitOpt).then(timeEnd)
    }

    const actualOpt = this.options.global ? {
      ignoreMissing: true,
      global: true,
      filter: (node, kid) => {
        // if it's not the project root, and we have no explicit requests,
        // then we're already into a nested dep, so we keep it
        if (this.explicitRequests.size === 0 || !node.isProjectRoot) {
          return true
        }

        // if we added it as an edgeOut, then we want it
        if (this.idealTree.edgesOut.has(kid)) {
          return true
        }

        // if it's an explicit request, then we want it
        const hasExplicit = [...this.explicitRequests]
          .some(edge => edge.name === kid)
        if (hasExplicit) {
          return true
        }

        // ignore the rest of the global install folder
        return false
      },
    } : { ignoreMissing: true }

    if (!this.options.global) {
      // local installs load the actual and ideal trees in parallel
      return Promise.all([
        this.loadActual(actualOpt),
        this.buildIdealTree(bitOpt),
      ]).then(timeEnd)
    }

    // the global install space tends to have a lot of stuff in it.  don't
    // load all of it, just what we care about.  we won't be saving a
    // hidden lockfile in there anyway.  Note that we have to load ideal
    // BEFORE loading actual, so that the actualOpt can use the
    // explicitRequests which is set during buildIdealTree
    return this.buildIdealTree(bitOpt)
      .then(() => this.loadActual(actualOpt))
      .then(timeEnd)
  }

  // compute this.diff between actualTree and idealTree, restricting the
  // comparison to explicitly requested names (global) or to workspace /
  // root deps as configured.
  [_diffTrees] () {
    if (this.options.packageLockOnly) {
      return
    }

    const timeEnd = time.start('reify:diffTrees')
    // XXX if we have an existing diff already, there should be a way
    // to just invalidate the parts that changed, but avoid walking the
    // whole tree again.

    const includeWorkspaces = this.options.workspacesEnabled
    const includeRootDeps = !includeWorkspaces ||
      this.options.includeWorkspaceRoot && this.options.workspaces.length > 0

    const filterNodes = []
    if (this.options.global && this.explicitRequests.size) {
      const idealTree = this.idealTree.target
      const actualTree = this.actualTree.target
      // we ONLY are allowed to make changes in the global top-level
      // children where there's an explicit request.
      for (const { name } of this.explicitRequests) {
        const ideal = idealTree.children.get(name)
        if (ideal) {
          filterNodes.push(ideal)
        }
        const actual = actualTree.children.get(name)
        if (actual) {
          filterNodes.push(actual)
        }
      }
    } else {
      if (includeWorkspaces) {
        // add all ws nodes to filterNodes
        for (const ws of this.options.workspaces) {
          const ideal = this.idealTree.children.get(ws)
          if (ideal) {
            filterNodes.push(ideal)
          }
          const actual = this.actualTree.children.get(ws)
          if (actual) {
            filterNodes.push(actual)
          }
        }
      }
      if (includeRootDeps) {
        // add all non-workspace nodes to filterNodes
        for (const tree of [this.idealTree, this.actualTree]) {
          for (const { type, to } of tree.edgesOut.values()) {
            if (type !== 'workspace' && to) {
              filterNodes.push(to)
            }
          }
        }
      }
    }

    // find all the nodes that need to change between the actual
    // and ideal trees.
    this.diff = Diff.calculate({
      shrinkwrapInflated: this.#shrinkwrapInflated,
      filterNodes,
      actual: this.actualTree,
      ideal: this.idealTree,
    })

    // we don't have to add 'removed' folders to the trashlist, because
    // they'll be moved aside to a retirement folder, and then the retired
    // folder will be deleted at the end.  This is important when we have
    // a folder like FOO being "removed" in favor of a folder like "foo",
    // because if we remove node_modules/FOO on case-insensitive systems,
    // it will remove the dep that we *want* at node_modules/foo.

    timeEnd()
  }

  // add the node and all its bins to the list of things to be
  // removed later on in the process.  optionally, also mark them
  // as a retired paths, so that we move them out of the way and
  // replace them when rolling back on failure.
  [_addNodeToTrashList] (node, retire = false) {
    const paths = [node.path, ...node.binPaths]
    const moves = this.#retiredPaths
    log.silly('reify', 'mark', retire ? 'retired' : 'deleted', paths)
    for (const path of paths) {
      if (retire) {
        const retired = retirePath(path)
        moves[path] = retired
        this[_trashList].add(retired)
      } else {
        this[_trashList].add(path)
      }
    }
  }

  // move aside the shallowest nodes in the tree that have to be
  // changed or removed, so that we can rollback if necessary.
  [_retireShallowNodes] () {
    const timeEnd = time.start('reify:retireShallow')
    const moves = this.#retiredPaths = {}
    for (const diff of this.diff.children) {
      if (diff.action === 'CHANGE' || diff.action === 'REMOVE') {
        // we'll have to clean these up at the end, so add them to the list
        this[_addNodeToTrashList](diff.actual, true)
      }
    }
    log.silly('reify', 'moves', moves)
    const movePromises = Object.entries(moves)
      .map(([from, to]) => this[_renamePath](from, to))
    return promiseAllRejectLate(movePromises).then(timeEnd)
  }

  // move `from` to `to`, creating the destination's parent dir on the
  // first ENOENT, and clobbering an existing destination on EEXIST.
  [_renamePath] (from, to, didMkdirp = false) {
    return moveFile(from, to)
      .catch(er => {
        // Occasionally an expected bin file might not exist in the package,
        // or a shim/symlink might have been moved aside.  If we've already
        // handled the most common cause of ENOENT (dir doesn't exist yet),
        // then just ignore any ENOENT.
        if (er.code === 'ENOENT') {
          return didMkdirp ? null : mkdir(dirname(to), { recursive: true }).then(() =>
            this[_renamePath](from, to, true))
        } else if (er.code === 'EEXIST') {
          return rm(to, { recursive: true, force: true }).then(() => moveFile(from, to))
        } else {
          throw er
        }
      })
  }

  // undo _retireShallowNodes by moving every retired path back into place,
  // then rethrow the original error that triggered the rollback.
  [_rollbackRetireShallowNodes] (er) {
    const timeEnd = time.start('reify:rollback:retireShallow')
    const moves = this.#retiredPaths
    const movePromises = Object.entries(moves)
      .map(([from, to]) => this[_renamePath](to, from))
    return promiseAllRejectLate(movePromises)
      // ignore subsequent rollback errors
      .catch(() => {})
      .then(timeEnd)
      .then(() => {
        throw er
      })
  }

  // adding to the trash list will skip reifying, and delete them
  // if they are currently in the tree and otherwise untouched.
[_addOmitsToTrashList] () {
    // nothing to do unless at least one of dev/optional/peer is omitted
    if (!this.#omitDev && !this.#omitOptional && !this.#omitPeer) {
      return
    }

    const timeEnd = time.start('reify:trashOmits')

    for (const node of this.idealTree.inventory.values()) {
      const { top } = node

      // if the top is not the root or workspace then we do not want to omit it
      if (!top.isProjectRoot && !top.isWorkspace) {
        continue
      }

      // if a diff filter has been created, then we do not omit the node if the
      // top node is not in that set
      if (this.diff?.filterSet?.size && !this.diff.filterSet.has(top)) {
        continue
      }

      // omit node if the dep type matches any omit flags that were set
      if (
        node.peer && this.#omitPeer ||
        node.dev && this.#omitDev ||
        node.optional && this.#omitOptional ||
        node.devOptional && this.#omitOptional && this.#omitDev
      ) {
        this[_addNodeToTrashList](node)
      }
    }

    timeEnd()
  }

  // make the directories for every ADD/CHANGE leaf in the diff, retiring
  // any non-directory entries found on the way up to the tree top.
  [_createSparseTree] () {
    const timeEnd = time.start('reify:createSparse')
    // if we call this fn again, we look for the previous list
    // so that we can avoid making the same directory multiple times
    const leaves = this.diff.leaves
      .filter(diff => {
        return (diff.action === 'ADD' || diff.action === 'CHANGE') &&
          !this.#sparseTreeDirs.has(diff.ideal.path) &&
          !diff.ideal.isLink
      })
      .map(diff => diff.ideal)

    // we check this in parallel, so guard against multiple attempts to
    // retire the same path at the same time.
    const dirsChecked = new Set()
    return promiseAllRejectLate(leaves.map(async node => {
      for (const d of walkUp(node.path)) {
        if (d === node.top.path) {
          break
        }
        if (dirsChecked.has(d)) {
          continue
        }
        dirsChecked.add(d)
        const st = await lstat(d).catch(() => null)
        // this can happen if we have a link to a package with a name
        // that the filesystem treats as if it is the same thing.
        // would be nice to have conditional istanbul ignores here...
        /* istanbul ignore next - defense in depth */
        if (st && !st.isDirectory()) {
          const retired = retirePath(d)
          this.#retiredPaths[d] = retired
          this[_trashList].add(retired)
          await this[_renamePath](d, retired)
        }
      }
      this.#sparseTreeDirs.add(node.path)
      const made = await mkdir(node.path, { recursive: true })
      // if the directory already exists, made will be undefined. if that's the case
      // we don't want to remove it because we aren't the ones who created it so we
      // omit it from the #sparseTreeRoots
      if (made) {
        this.#sparseTreeRoots.add(made)
      }
    })).then(timeEnd)
  }

  // remove the directories we created for the sparse tree and any retired
  // paths, then fall through to the retire-shallow rollback (which rethrows).
  [_rollbackCreateSparseTree] (er) {
    const timeEnd = time.start('reify:rollback:createSparse')
    // cut the roots of the sparse tree that were created, not the leaves
    const roots = this.#sparseTreeRoots
    // also delete the moves that we retired, so that we can move them back
    const failures = []
    const targets = [...roots, ...Object.keys(this.#retiredPaths)]
    const unlinks = targets
      .map(path => rm(path, { recursive: true, force: true })
        .catch(er => failures.push([path, er])))
    return promiseAllRejectLate(unlinks).then(() => {
      // eslint-disable-next-line promise/always-return
      if (failures.length) {
        log.warn('cleanup', 'Failed to remove some directories', failures)
      }
    })
      .then(timeEnd)
      .then(() => this[_rollbackRetireShallowNodes](er))
  }

  // shrinkwrap nodes define their dependency branches with a file, so
  // we need to unpack them, read that shrinkwrap file, and then update
  // the tree by calling loadVirtual with the node as the root.
[_loadShrinkwrapsAndUpdateTrees] () {
    const seen = this.#shrinkwrapInflated
    const shrinkwraps = this.diff.leaves
      .filter(d => (d.action === 'CHANGE' || d.action === 'ADD' || !d.action) &&
        d.ideal.hasShrinkwrap && !seen.has(d.ideal) &&
        !this[_trashList].has(d.ideal.path))

    if (!shrinkwraps.length) {
      return
    }

    const timeEnd = time.start('reify:loadShrinkwraps')

    const Arborist = this.constructor
    return promiseAllRejectLate(shrinkwraps.map(diff => {
      const node = diff.ideal
      seen.add(node)
      // nodes with no diff action are already on disk; skip extraction
      return diff.action ? this[_reifyNode](node) : node
    }))
      .then(nodes => promiseAllRejectLate(nodes.map(node => new Arborist({
        ...this.options,
        path: node.path,
      }).loadVirtual({ root: node }))))
      // reload the diff and sparse tree because the ideal tree changed
      .then(() => this[_diffTrees]())
      .then(() => this[_createSparseTree]())
      .then(() => this[_addOmitsToTrashList]())
      // recurse: inflated shrinkwraps may expose further shrinkwrapped deps
      .then(() => this[_loadShrinkwrapsAndUpdateTrees]())
      .then(timeEnd)
  }

  // create a symlink for Links, extract for Nodes
  // return the node object, since we usually want that
  // handle optional dep failures here
  // If node is in trash list, skip it
  // If reifying fails, and the node is optional, add it and its optionalSet
  // to the trash list
  // Always return the node.
  [_reifyNode] (node) {
    if (this[_trashList].has(node.path)) {
      return node
    }

    const timeEnd = time.start(`reifyNode:${node.location}`)
    this.addTracker('reify', node.name, node.location)

    const { npmVersion, nodeVersion, cpu, os, libc } = this.options
    const p = Promise.resolve().then(async () => {
      // when we reify an optional node, check the engine and platform
      // first.  be sure to ignore the --force and --engine-strict flags,
      // since we always want to skip any optional packages we can't install.
      // these checks throwing will result in a rollback and removal
      // of the mismatches
      // eslint-disable-next-line promise/always-return
      if (node.optional) {
        checkEngine(node.package, npmVersion, nodeVersion, false)
        checkPlatform(node.package, false, { cpu, os, libc })
      }
      await this[_checkBins](node)
      await this.#extractOrLink(node)
      const { _id, deprecated } = node.package
      // The .catch is in _handleOptionalFailure. Not ideal, this should be cleaned up.
      // eslint-disable-next-line promise/always-return
      if (deprecated) {
        log.warn('deprecated', `${_id}: ${deprecated}`)
      }
    })

    return this[_handleOptionalFailure](node, p)
      .then(() => {
        this.finishTracker('reify', node.name, node.location)
        timeEnd()
        return node
      })
  }

  // do not allow node_modules to be a symlink
  async #validateNodeModules (nm) {
    if (this.options.force || this.#nmValidated.has(nm)) {
      return
    }
    const st = await lstat(nm).catch(() => null)
    if (!st || st.isDirectory()) {
      this.#nmValidated.add(nm)
      return
    }
    log.warn('reify', 'Removing non-directory', nm)
    await rm(nm, { recursive: true, force: true })
  }

  // extract a Node's tarball into place, or (re)create a Link's symlink.
  async #extractOrLink (node) {
    const nm = resolve(node.parent.path, 'node_modules')
    await this.#validateNodeModules(nm)

    if (!node.isLink) {
      // in normal cases, node.resolved should *always* be set by now.
      // however, it is possible when a lockfile is damaged, or very old,
      // or in some other race condition bugs in npm v6, that a previously
      // bundled dependency will have just a version, but no resolved value,
      // and no 'bundled: true' setting.
      // Do the best with what we have, or else remove it from the tree
      // entirely, since we can't possibly reify it.
      let res = null
      if (node.resolved) {
        const registryResolved = this.#registryResolved(node.resolved)
        if (registryResolved) {
          res = `${node.name}@${registryResolved}`
        }
      } else if (node.package.name && node.version) {
        res = `${node.package.name}@${node.version}`
      }

      // no idea what this thing is.  remove it from the tree.
      if (!res) {
        const warning = 'invalid or damaged lockfile detected\n' +
          'please re-try this operation once it completes\n' +
          'so that the damage can be corrected, or perform\n' +
          'a fresh install with no lockfile if the problem persists.'
        log.warn('reify', warning)
        log.verbose('reify', 'unrecognized node in tree', node.path)
        node.parent = null
        node.fsParent = null
        this[_addNodeToTrashList](node)
        return
      }
      // debug-build sanity check: never unpack over a non-directory
      await debug(async () => {
        const st = await lstat(node.path).catch(() => null)
        if (st && !st.isDirectory()) {
          debug.log('unpacking into a non-directory', node)
          throw Object.assign(new Error('ENOTDIR: not a directory'), {
            code: 'ENOTDIR',
            path: node.path,
          })
        }
      })
      await pacote.extract(res, node.path, {
        ...this.options,
        resolved: node.resolved,
        integrity: node.integrity,
      })
      // store nodes don't use Node class so node.package doesn't get updated
      if (node.isInStore) {
        const pkg = await rpj(join(node.path, 'package.json'))
        node.package.scripts = pkg.scripts
      }
      return
    }

    // node.isLink
    await rm(node.path, { recursive: true, force: true })
    // symlink
    const dir = dirname(node.path)
    const target = node.realpath
    const rel = relative(dir, target)
    await mkdir(dir, { recursive: true })
    return symlink(rel, node.path, 'junction')
  }

  // if the node is optional, then the failure of the promise is nonfatal
  // just add it and its optional set to the trash list.
  [_handleOptionalFailure] (node, p) {
    return (node.optional ? p.catch(() => {
      const set = optionalSet(node)
      for (node of set) {
        log.verbose('reify', 'failed optional dependency', node.path)
        this[_addNodeToTrashList](node)
      }
    }) : p).then(() => node)
  }

  // rewrite a resolved URL whose host matches replaceRegistryHost (or
  // always, if so configured) to point at the currently configured registry.
  #registryResolved (resolved) {
    // the default registry url is a magic value meaning "the currently
    // configured registry".
    // `resolved` must never be falsey.
    //
    // XXX: use a magic string that isn't also a valid value, like
    // ${REGISTRY} or something.  This has to be threaded through the
    // Shrinkwrap and Node classes carefully, so for now, just treat
    // the default reg as the magical animal that it has been.
    const resolvedURL = hgi.parseUrl(resolved)

    if (!resolvedURL) {
      // if we could not parse the url at all then returning nothing
      // here means it will get removed from the tree in the next step
      return
    }

    if ((this.options.replaceRegistryHost === resolvedURL.hostname)
      || this.options.replaceRegistryHost === 'always') {
      // this.registry always has a trailing slash
      return `${this.registry.slice(0, -1)}${resolvedURL.pathname}${resolvedURL.searchParams}`
    }

    return resolved
  }

  // bundles are *sort of* like shrinkwraps, in that the branch is defined
  // by the contents of the package.  however, in their case, rather than
  // shipping a virtual tree that must be reified, they ship an entire
  // reified actual tree that must be unpacked and not modified.
  [_loadBundlesAndUpdateTrees] (depth = 0, bundlesByDepth) {
    let maxBundleDepth
    if (!bundlesByDepth) {
      // first call: walk the diff and group bundlers by their tree depth
      bundlesByDepth = new Map()
      maxBundleDepth = -1
      dfwalk({
        tree: this.diff,
        visit: diff => {
          const node = diff.ideal
          if (!node) {
            return
          }
          if (node.isProjectRoot) {
            return
          }

          const { bundleDependencies } = node.package
          if (bundleDependencies && bundleDependencies.length) {
            maxBundleDepth = Math.max(maxBundleDepth, node.depth)
            if (!bundlesByDepth.has(node.depth)) {
              bundlesByDepth.set(node.depth, [node])
            } else {
              bundlesByDepth.get(node.depth).push(node)
            }
          }
        },
        getChildren: diff => diff.children,
      })

      bundlesByDepth.set('maxBundleDepth', maxBundleDepth)
    } else {
      maxBundleDepth = bundlesByDepth.get('maxBundleDepth')
    }

    if (depth === 0) {
      time.start('reify:loadBundles')
    }

    if (depth > maxBundleDepth) {
      // if we did something, then prune the tree and update the diffs
      if (maxBundleDepth !== -1) {
        this.#pruneBundledMetadeps(bundlesByDepth)
        this[_diffTrees]()
      }
      time.end('reify:loadBundles')
      return
    }

    // skip any that have since been removed from the tree, eg by a
    // shallower bundle overwriting them with a bundled meta-dep.
    const set = (bundlesByDepth.get(depth) || [])
      .filter(node => node.root === this.idealTree &&
        node.target !== node.root &&
        !this[_trashList].has(node.path))

    if (!set.length) {
      return this[_loadBundlesAndUpdateTrees](depth + 1, bundlesByDepth)
    }

    // extract all the nodes with bundles
    return promiseCallLimit(set.map(node => {
      return () => {
        this.#bundleUnpacked.add(node)
        return this[_reifyNode](node)
      }
    }), { rejectLate: true })
      // then load their unpacked children and move into the ideal tree
      .then(nodes =>
        promiseAllRejectLate(nodes.map(async node => {
          const arb = new this.constructor({
            ...this.options,
            path: node.path,
          })
          const notTransplanted = new Set(node.children.keys())
          await arb.loadActual({
            root: node,
            // don't transplant any sparse folders we created
            // loadActual will set node.package to {} for empty directories
            // if by chance there are some empty folders in the node_modules
            // tree for some other reason, then ok, ignore those too.
            transplantFilter: node => {
              if (node.package._id) {
                // it's actually in the bundle if it gets transplanted
                notTransplanted.delete(node.name)
                return true
              } else {
                return false
              }
            },
          })
          for (const name of notTransplanted) {
            this.#bundleMissing.add(node.children.get(name))
          }
        })))
      // move onto the next level of bundled items
      .then(() => this[_loadBundlesAndUpdateTrees](depth + 1, bundlesByDepth))
  }

  // https://github.com/npm/cli/issues/1597#issuecomment-667639545
  #pruneBundledMetadeps (bundlesByDepth) {
    const bundleShadowed = new Set()

    // Example dep graph:
    // root -> (a, c)
    // a -> BUNDLE(b)
    // b -> c
    // c -> b
    //
    // package tree:
    // root
    // +-- a
    // |   +-- b(1)
    // |   +-- c(1)
    // +-- b(2)
    // +-- c(2)
    //
    // 1. mark everything that's shadowed by anything in the bundle.  This
    //    marks b(2) and c(2).
    // 2. anything with edgesIn from outside the set, mark not-extraneous,
    //    remove from set.  This unmarks c(2).
    // 3. continue until no change
    // 4. remove everything in the set from the tree.  b(2) is pruned

    // create the list of nodes shadowed by children of bundlers
    for (const bundles of bundlesByDepth.values()) {
      // skip the 'maxBundleDepth' item
      if (!Array.isArray(bundles)) {
        continue
      }
      for (const node of bundles) {
        for (const name of node.children.keys()) {
          const shadow = node.parent.resolve(name)
          if (!shadow) {
            continue
          }
          bundleShadowed.add(shadow)
          shadow.extraneous = true
        }
      }
    }

    // lib -> (a@1.x) BUNDLE(a@1.2.3 (b@1.2.3))
    // a@1.2.3 -> (b@1.2.3)
    // a@1.3.0 -> (b@2)
    // b@1.2.3 -> ()
    // b@2 -> (c@2)
    //
    // root
    // +-- lib
    // |   +-- a@1.2.3
    // |   +-- b@1.2.3
    // +-- b@2 <-- shadowed, now extraneous
    // +-- c@2 <-- also shadowed, because only dependent is shadowed
    for (const shadow of bundleShadowed) {
      for (const shadDep of shadow.edgesOut.values()) {
        /* istanbul ignore else - pretty unusual situation, just being
         * defensive here.  Would mean that a bundled dep has a dependency
         * that is unmet.  which, weird, but if you bundle it, we take
         * whatever you put there and assume the publisher knows best. */
        if (shadDep.to) {
          bundleShadowed.add(shadDep.to)
          shadDep.to.extraneous = true
        }
      }
    }

    // fixed-point iteration: un-mark anything reachable from outside the set
    let changed
    do {
      changed = false
      for (const shadow of bundleShadowed) {
        for (const edge of shadow.edgesIn) {
          if (!bundleShadowed.has(edge.from)) {
            shadow.extraneous = false
            bundleShadowed.delete(shadow)
            changed = true
            break
          }
        }
      }
    } while (changed)

    for (const shadow of bundleShadowed) {
      this[_addNodeToTrashList](shadow)
      shadow.root = null
    }
  }

  async [_submitQuickAudit] () {
    if (this.options.audit === false) {
      this.auditReport = null
      return
    }

    // we submit the quick audit at this point in the process, as soon as
    // we have all the deps resolved, so that it can overlap with the other
    // actions as much as possible.  Stash the promise, which we resolve
    // before finishing the reify() and returning the tree.  Thus, we do
    // NOT return the promise, as the intent is for this to run in parallel
    // with the reification, and be resolved at a later time.
    const timeEnd = time.start('reify:audit')
    const options = { ...this.options }
    const tree = this.idealTree

    // if we're operating on a workspace, only audit the workspace deps
    if (this.options.workspaces.length) {
      options.filterSet = this.workspaceDependencySet(
        tree,
        this.options.workspaces,
        this.options.includeWorkspaceRoot
      )
    }

    this.auditReport = AuditReport.load(tree, options).then(res => {
      timeEnd()
      return res
    })
  }

  // ok!  actually unpack stuff into their target locations!
  // The sparse tree has already been created, so we walk the diff
  // kicking off each unpack job.  If any fail, we rm the sparse
  // tree entirely and try to put everything back where it was.
  [_unpackNewModules] () {
    const timeEnd = time.start('reify:unpack')
    const unpacks = []
    dfwalk({
      tree: this.diff,
      visit: diff => {
        // no unpacking if we don't want to change this thing
        if (diff.action !== 'CHANGE' && diff.action !== 'ADD') {
          return
        }

        const node = diff.ideal
        const bd = this.#bundleUnpacked.has(node)
        const sw = this.#shrinkwrapInflated.has(node)
        const bundleMissing = this.#bundleMissing.has(node)

        // check whether we still need to unpack this one.
        // test the inDepBundle last, since that's potentially a tree walk.
        const doUnpack = node && // can't unpack if removed!
          // root node already exists
          !node.isRoot &&
          // already unpacked to read bundle
          !bd &&
          // already unpacked to read sw
          !sw &&
          // already unpacked by another dep's bundle
          (bundleMissing || !node.inDepBundle)

        if (doUnpack) {
          unpacks.push(this[_reifyNode](node))
        }
      },
      getChildren: diff => diff.children,
    })
    return promiseAllRejectLate(unpacks).then(timeEnd)
  }

  // This is the part where we move back the unchanging nodes that were
  // the children of a node that did change.  If this fails, the rollback
  // is a three-step process.  First, we try to move the retired unchanged
  // nodes BACK to their retirement folders, then delete the sparse tree,
  // then move everything out of retirement.
[_moveBackRetiredUnchanged] () { // get a list of all unchanging children of any shallow retired nodes // if they are not the ancestor of any node in the diff set, then the // directory won't already exist, so just rename it over. // This is sort of an inverse diff tree, of all the nodes where // the actualTree and idealTree _don't_ differ, starting from the // shallowest nodes that we moved aside in the first place. const timeEnd = time.start('reify:unretire') const moves = this.#retiredPaths this.#retiredUnchanged = {} return promiseAllRejectLate(this.diff.children.map(diff => { // skip if nothing was retired if (diff.action !== 'CHANGE' && diff.action !== 'REMOVE') { return } const { path: realFolder } = diff.actual const retireFolder = moves[realFolder] /* istanbul ignore next - should be impossible */ debug(() => { if (!retireFolder) { const er = new Error('trying to un-retire but not retired') throw Object.assign(er, { realFolder, retireFolder, actual: diff.actual, ideal: diff.ideal, action: diff.action, }) } }) this.#retiredUnchanged[retireFolder] = [] return promiseAllRejectLate(diff.unchanged.map(node => { // no need to roll back links, since we'll just delete them anyway if (node.isLink) { return mkdir(dirname(node.path), { recursive: true, force: true }) .then(() => this[_reifyNode](node)) } // will have been moved/unpacked along with bundler if (node.inDepBundle && !this.#bundleMissing.has(node)) { return } this.#retiredUnchanged[retireFolder].push(node) const rel = relative(realFolder, node.path) const fromPath = resolve(retireFolder, rel) // if it has bundleDependencies, then make node_modules. otherwise // skip it. const bd = node.package.bundleDependencies const dir = bd && bd.length ? 
node.path + '/node_modules' : node.path return mkdir(dir, { recursive: true }).then(() => this[_moveContents](node, fromPath)) })) })).then(timeEnd) } // move the contents from the fromPath to the node.path [_moveContents] (node, fromPath) { return packageContents({ path: fromPath, depth: 1, packageJsonCache: new Map([[fromPath + '/package.json', node.package]]), }).then(res => promiseAllRejectLate(res.map(path => { const rel = relative(fromPath, path) const to = resolve(node.path, rel) return this[_renamePath](path, to) }))) } [_rollbackMoveBackRetiredUnchanged] (er) { const moves = this.#retiredPaths // flip the mapping around to go back const realFolders = new Map(Object.entries(moves).map(([k, v]) => [v, k])) const promises = Object.entries(this.#retiredUnchanged) .map(([retireFolder, nodes]) => promiseAllRejectLate(nodes.map(node => { const realFolder = realFolders.get(retireFolder) const rel = relative(realFolder, node.path) const fromPath = resolve(retireFolder, rel) return this[_moveContents]({ ...node, path: fromPath }, node.path) }))) return promiseAllRejectLate(promises) .then(() => this[_rollbackCreateSparseTree](er)) } [_build] () { const timeEnd = time.start('reify:build') // for all the things being installed, run their appropriate scripts // run in tip->root order, so as to be more likely to build a node's // deps before attempting to build it itself const nodes = [] dfwalk({ tree: this.diff, leave: diff => { if (!diff.ideal.isProjectRoot) { nodes.push(diff.ideal) } }, // process adds before changes, ignore removals getChildren: diff => diff && diff.children, filter: diff => diff.action === 'ADD' || diff.action === 'CHANGE', }) // pick up link nodes from the unchanged list as we want to run their // scripts in every install despite of having a diff status change for (const node of this.diff.unchanged) { const tree = node.root.target // skip links that only live within node_modules as they are most // likely managed by packages we installed, we only 
want to rebuild // unchanged links we directly manage const linkedFromRoot = node.parent === tree || node.target.fsTop === tree if (node.isLink && linkedFromRoot) { nodes.push(node) } } return this.rebuild({ nodes, handleOptionalFailure: true }).then(timeEnd) } // the tree is pretty much built now, so it's cleanup time. // remove the retired folders, and any deleted nodes // If this fails, there isn't much we can do but tell the user about it. // Thankfully, it's pretty unlikely that it'll fail, since rm is a node builtin. async [_removeTrash] () { const timeEnd = time.start('reify:trash') const promises = [] const failures = [] const _rm = path => rm(path, { recursive: true, force: true }).catch(er => failures.push([path, er])) for (const path of this[_trashList]) { promises.push(_rm(path)) } await promiseAllRejectLate(promises) if (failures.length) { log.warn('cleanup', 'Failed to remove some directories', failures) } timeEnd() } // last but not least, we save the ideal tree metadata to the package-lock // or shrinkwrap file, and any additions or removals to package.json async [_saveIdealTree] (options) { // the ideal tree is actualized now, hooray! // it still contains all the references to optional nodes that were removed // for install failures. Those still end up in the shrinkwrap, so we // save it first, then prune out the optional trash, and then return it. 
const save = !(options.save === false) // we check for updates in order to make sure we run save ideal tree // even though save=false since we want `npm update` to be able to // write to package-lock files by default const hasUpdates = this[_updateAll] || this[_updateNames].length // we're going to completely skip save ideal tree in case of a global or // dry-run install and also if the save option is set to false, EXCEPT for // update since the expected behavior for npm7+ is for update to // NOT save to package.json, we make that exception since we still want // saveIdealTree to be able to write the lockfile by default. const saveIdealTree = !( (!save && !hasUpdates) || this.options.global || this.options.dryRun ) if (!saveIdealTree) { return false } const timeEnd = time.start('reify:save') const updatedTrees = new Set() const updateNodes = nodes => { for (const { name, tree: addTree } of nodes) { // addTree either the root, or a workspace const edge = addTree.edgesOut.get(name) const pkg = addTree.package const req = npa.resolve(name, edge.spec, addTree.realpath) const { rawSpec, subSpec } = req const spec = subSpec ? subSpec.rawSpec : rawSpec const child = edge.to // if we tried to install an optional dep, but it was a version // that we couldn't resolve, this MAY be missing. if we haven't // blown up by now, it's because it was not a problem, though, so // just move on. if (!child || !addTree.isTop) { continue } let newSpec // True if the dependency is getting installed from a local file path // In this case it is not possible to do the normal version comparisons // as the new version will be a file path const isLocalDep = req.type === 'directory' || req.type === 'file' if (req.registry) { const version = child.version const prefixRange = version ? this.options.savePrefix + version : '*' // if we installed a range, then we save the range specified // if it is not a subset of the ^x.y.z. 
eg, installing a range // of `1.x <1.2.3` will not be saved as `^1.2.0`, because that // would allow versions outside the requested range. Tags and // specific versions save with the save-prefix. const isRange = (subSpec || req).type === 'range' let range = spec if ( !isRange || spec === '*' || subset(prefixRange, spec, { loose: true }) ) { range = prefixRange } const pname = child.packageName const alias = name !== pname newSpec = alias ? `npm:${pname}@${range}` : range } else if (req.hosted) { // save the git+https url if it has auth, otherwise shortcut const h = req.hosted const opt = { noCommittish: false } if (h.https && h.auth) { newSpec = `git+${h.https(opt)}` } else { newSpec = h.shortcut(opt) } } else if (isLocalDep) { // when finding workspace nodes, make sure that // we save them using their version instead of // using their relative path if (edge.type === 'workspace') { const { version } = edge.to.target const prefixRange = version ? this.options.savePrefix + version : '*' newSpec = prefixRange } else { // save the relative path in package.json // Normally saveSpec is updated with the proper relative // path already, but it's possible to specify a full absolute // path initially, in which case we can end up with the wrong // thing, so just get the ultimate fetchSpec and relativize it. 
const p = req.fetchSpec.replace(/^file:/, '') const rel = relpath(addTree.realpath, p).replace(/#/g, '%23') newSpec = `file:${rel}` } } else { newSpec = req.saveSpec } if (options.saveType) { const depType = saveTypeMap.get(options.saveType) pkg[depType][name] = newSpec // rpj will have moved it here if it was in both // if it is empty it will be deleted later if (options.saveType === 'prod' && pkg.optionalDependencies) { delete pkg.optionalDependencies[name] } } else { if (hasSubKey(pkg, 'dependencies', name)) { pkg.dependencies[name] = newSpec } if (hasSubKey(pkg, 'devDependencies', name)) { pkg.devDependencies[name] = newSpec // don't update peer or optional if we don't have to if (hasSubKey(pkg, 'peerDependencies', name) && (isLocalDep || !intersects(newSpec, pkg.peerDependencies[name]))) { pkg.peerDependencies[name] = newSpec } if (hasSubKey(pkg, 'optionalDependencies', name) && (isLocalDep || !intersects(newSpec, pkg.optionalDependencies[name]))) { pkg.optionalDependencies[name] = newSpec } } else { if (hasSubKey(pkg, 'peerDependencies', name)) { pkg.peerDependencies[name] = newSpec } if (hasSubKey(pkg, 'optionalDependencies', name)) { pkg.optionalDependencies[name] = newSpec } } } updatedTrees.add(addTree) } } // Returns true if any of the edges from this node has a semver // range definition that is an exact match to the version installed // e.g: should return true if for a given an installed version 1.0.0, // range is either =1.0.0 or 1.0.0 const exactVersion = node => { for (const edge of node.edgesIn) { try { if (semver.subset(edge.spec, node.version)) { return false } } catch { // ignore errors } } return true } // helper that retrieves an array of nodes that were // potentially updated during the reify process, in order // to limit the number of nodes to check and update, only // select nodes from the inventory that are direct deps // of a given package.json (project root or a workspace) // and in ase of using a list of `names`, restrict nodes // to 
only names that are found in this list const retrieveUpdatedNodes = names => { const filterDirectDependencies = node => !node.isRoot && node.resolveParent && node.resolveParent.isRoot && (!names || names.includes(node.name)) && exactVersion(node) // skip update for exact ranges const directDeps = this.idealTree.inventory .filter(filterDirectDependencies) // traverses the list of direct dependencies and collect all nodes // to be updated, since any of them might have changed during reify const nodes = [] for (const node of directDeps) { for (const edgeIn of node.edgesIn) { nodes.push({ name: node.name, tree: edgeIn.from.target, }) } } return nodes } if (save) { // when using update all alongside with save, we'll make // sure to refresh every dependency of the root idealTree if (this[_updateAll]) { const nodes = retrieveUpdatedNodes() updateNodes(nodes) } else { // resolvedAdd is the list of user add requests, but with names added // to things like git repos and tarball file/urls. However, if the // user requested 'foo@', and we have a foo@file:../foo, then we should // end up saving the spec we actually used, not whatever they gave us. 
if (this[_resolvedAdd].length) { updateNodes(this[_resolvedAdd]) } // if updating given dependencies by name, restrict the list of // nodes to check to only those currently in _updateNames if (this[_updateNames].length) { const nodes = retrieveUpdatedNodes(this[_updateNames]) updateNodes(nodes) } // grab any from explicitRequests that had deps removed for (const { from: tree } of this.explicitRequests) { updatedTrees.add(tree) } } } if (save) { for (const tree of updatedTrees) { // refresh the edges so they have the correct specs tree.package = tree.package const pkgJson = await PackageJson.load(tree.path, { create: true }) const { dependencies = {}, devDependencies = {}, optionalDependencies = {}, peerDependencies = {}, // bundleDependencies is not required by PackageJson like the other // fields here PackageJson also doesn't omit an empty array for this // field so defaulting this to an empty array would add that field to // every package.json file. bundleDependencies, } = tree.package pkgJson.update({ dependencies, devDependencies, optionalDependencies, peerDependencies, bundleDependencies, }) await pkgJson.save() } } // before now edge specs could be changing, affecting the `requires` field // in the package lock, so we hold off saving to the very last action if (this[_usePackageLock]) { // preserve indentation, if possible let format = this.idealTree.package[Symbol.for('indent')] if (format === undefined) { format = ' ' } // TODO this ignores options.save await this.idealTree.meta.save({ format: (this.options.formatPackageLock && format) ? 
format : this.options.formatPackageLock, }) } timeEnd() return true } } PK]�\o������)arborist/lib/arborist/build-ideal-tree.jsnu�[���// mixin implementing the buildIdealTree method const localeCompare = require('@isaacs/string-locale-compare')('en') const rpj = require('read-package-json-fast') const npa = require('npm-package-arg') const pacote = require('pacote') const cacache = require('cacache') const { callLimit: promiseCallLimit } = require('promise-call-limit') const realpath = require('../../lib/realpath.js') const { resolve, dirname } = require('node:path') const treeCheck = require('../tree-check.js') const { readdirScoped } = require('@npmcli/fs') const { lstat, readlink } = require('node:fs/promises') const { depth } = require('treeverse') const { log, time } = require('proc-log') const { redact } = require('@npmcli/redact') const { OK, REPLACE, CONFLICT, } = require('../can-place-dep.js') const PlaceDep = require('../place-dep.js') const debug = require('../debug.js') const fromPath = require('../from-path.js') const calcDepFlags = require('../calc-dep-flags.js') const Shrinkwrap = require('../shrinkwrap.js') const { defaultLockfileVersion } = Shrinkwrap const Node = require('../node.js') const Link = require('../link.js') const addRmPkgDeps = require('../add-rm-pkg-deps.js') const optionalSet = require('../optional-set.js') const { checkEngine, checkPlatform } = require('npm-install-checks') const relpath = require('../relpath.js') const resetDepFlags = require('../reset-dep-flags.js') // note: some of these symbols are shared so we can hit // them with unit tests and reuse them across mixins const _updateAll = Symbol.for('updateAll') const _flagsSuspect = Symbol.for('flagsSuspect') const _setWorkspaces = Symbol.for('setWorkspaces') const _updateNames = Symbol.for('updateNames') const _resolvedAdd = Symbol.for('resolvedAdd') const _usePackageLock = Symbol.for('usePackageLock') const _rpcache = Symbol.for('realpathCache') const _stcache = 
Symbol.for('statCache') // used by Reify mixin const _addNodeToTrashList = Symbol.for('addNodeToTrashList') // Push items in, pop them sorted by depth and then path // Sorts physically shallower deps up to the front of the queue, because // they'll affect things deeper in, then alphabetical for consistency between // installs class DepsQueue { #deps = [] #sorted = true get length () { return this.#deps.length } push (item) { if (!this.#deps.includes(item)) { this.#sorted = false this.#deps.push(item) } } pop () { if (!this.#sorted) { this.#deps.sort((a, b) => (a.depth - b.depth) || localeCompare(a.path, b.path)) this.#sorted = true } return this.#deps.shift() } } module.exports = cls => class IdealTreeBuilder extends cls { #complete #currentDep = null #depsQueue = new DepsQueue() #depsSeen = new Set() #explicitRequests = new Set() #follow #installStrategy #linkNodes = new Set() #loadFailures = new Set() #manifests = new Map() #mutateTree = false // a map of each module in a peer set to the thing that depended on // that set of peers in the first place. Use a WeakMap so that we // don't hold onto references for nodes that are garbage collected. #peerSetSource = new WeakMap() #preferDedupe = false #prune #strictPeerDeps #virtualRoots = new Map() constructor (options) { super(options) // normalize trailing slash const registry = options.registry || 'https://registry.npmjs.org' options.registry = this.registry = registry.replace(/\/+$/, '') + '/' const { follow = false, installStrategy = 'hoisted', idealTree = null, installLinks = false, legacyPeerDeps = false, packageLock = true, strictPeerDeps = false, workspaces, global, } = options this.#strictPeerDeps = !!strictPeerDeps this.idealTree = idealTree this.installLinks = installLinks this.legacyPeerDeps = legacyPeerDeps this[_usePackageLock] = packageLock this.#installStrategy = global ? 
'shallow' : installStrategy this.#follow = !!follow if (workspaces?.length && global) { throw new Error('Cannot operate on workspaces in global mode') } this[_updateAll] = false this[_updateNames] = [] this[_resolvedAdd] = [] } get explicitRequests () { return new Set(this.#explicitRequests) } // public method async buildIdealTree (options = {}) { if (this.idealTree) { return this.idealTree } // allow the user to set reify options on the ctor as well. // XXX: deprecate separate reify() options object. options = { ...this.options, ...options } // an empty array or any falsey value is the same as null if (!options.add || options.add.length === 0) { options.add = null } if (!options.rm || options.rm.length === 0) { options.rm = null } const timeEnd = time.start('idealTree') if (!options.add && !options.rm && !options.update && this.options.global) { throw new Error('global requires add, rm, or update option') } // first get the virtual tree, if possible. If there's a lockfile, then // that defines the ideal tree, unless the root package.json is not // satisfied by what the ideal tree provides. // from there, we start adding nodes to it to satisfy the deps requested // by the package.json in the root. 
this.#parseSettings(options) // start tracker block this.addTracker('idealTree') try { await this.#initTree() await this.#inflateAncientLockfile() await this.#applyUserRequests(options) await this.#buildDeps() await this.#fixDepFlags() await this.#pruneFailedOptional() await this.#checkEngineAndPlatform() } finally { timeEnd() this.finishTracker('idealTree') } return treeCheck(this.idealTree) } async #checkEngineAndPlatform () { const { engineStrict, npmVersion, nodeVersion } = this.options for (const node of this.idealTree.inventory.values()) { if (!node.optional) { try { checkEngine(node.package, npmVersion, nodeVersion, this.options.force) } catch (err) { if (engineStrict) { throw err } log.warn(err.code, err.message, { package: err.pkgid, required: err.required, current: err.current, }) } checkPlatform(node.package, this.options.force) } } } #parseSettings (options) { const update = options.update === true ? { all: true } : Array.isArray(options.update) ? { names: options.update } : options.update || {} if (update.all || !Array.isArray(update.names)) { update.names = [] } this.#complete = !!options.complete this.#preferDedupe = !!options.preferDedupe // validates list of update names, they must // be dep names only, no semver ranges are supported for (const name of update.names) { const spec = npa(name) const validationError = new TypeError(`Update arguments must only contain package names, eg: npm update ${spec.name}`) validationError.code = 'EUPDATEARGS' // If they gave us anything other than a bare package name if (spec.raw !== spec.name) { throw validationError } } this[_updateNames] = update.names this[_updateAll] = update.all // we prune by default unless explicitly set to boolean false this.#prune = options.prune !== false // set if we add anything, but also set here if we know we'll make // changes and thus have to maybe prune later. 
this.#mutateTree = !!( options.add || options.rm || update.all || update.names.length ) } // load the initial tree, either the virtualTree from a shrinkwrap, // or just the root node from a package.json async #initTree () { const timeEnd = time.start('idealTree:init') let root if (this.options.global) { root = await this.#globalRootNode() } else { try { const pkg = await rpj(this.path + '/package.json') root = await this.#rootNodeFromPackage(pkg) } catch (err) { if (err.code === 'EJSONPARSE') { throw err } root = await this.#rootNodeFromPackage({}) } } return this[_setWorkspaces](root) // ok to not have a virtual tree. probably initial install. // When updating all, we load the shrinkwrap, but don't bother // to build out the full virtual tree from it, since we'll be // reconstructing it anyway. .then(root => this.options.global ? root : !this[_usePackageLock] || this[_updateAll] ? Shrinkwrap.reset({ path: this.path, lockfileVersion: this.options.lockfileVersion, resolveOptions: this.options, }).then(meta => Object.assign(root, { meta })) : this.loadVirtual({ root })) // if we don't have a lockfile to go from, then start with the // actual tree, so we only make the minimum required changes. // don't do this for global installs or updates, because in those // cases we don't use a lockfile anyway. // Load on a new Arborist object, so the Nodes aren't the same, // or else it'll get super confusing when we change them! .then(async root => { if ((!this[_updateAll] && !this.options.global && !root.meta.loadedFromDisk) || (this.options.global && this[_updateNames].length)) { await new this.constructor(this.options).loadActual({ root }) const tree = root.target // even though we didn't load it from a package-lock.json FILE, // we still loaded it "from disk", meaning we have to reset // dep flags before assuming that any mutations were reflected. 
if (tree.children.size) { root.meta.loadedFromDisk = true // set these so that we don't try to ancient lockfile reload it root.meta.originalLockfileVersion = root.meta.lockfileVersion = this.options.lockfileVersion || defaultLockfileVersion } } root.meta.inferFormattingOptions(root.package) return root }) .then(tree => { // search the virtual tree for invalid edges, if any are found add their source to // the depsQueue so that we'll fix it later depth({ tree, getChildren: (node) => { const children = [] for (const edge of node.edgesOut.values()) { children.push(edge.to) } return children }, filter: node => node, visit: node => { for (const edge of node.edgesOut.values()) { if (!edge.valid) { this.#depsQueue.push(node) break // no need to continue the loop after the first hit } } }, }) // null the virtual tree, because we're about to hack away at it // if you want another one, load another copy. this.idealTree = tree this.virtualTree = null timeEnd() return tree }) } async #globalRootNode () { const root = await this.#rootNodeFromPackage({ dependencies: {} }) // this is a gross kludge to handle the fact that we don't save // metadata on the root node in global installs, because the "root" // node is something like /usr/local/lib. const meta = new Shrinkwrap({ path: this.path, lockfileVersion: this.options.lockfileVersion, resolveOptions: this.options, }) meta.reset() root.meta = meta return root } async #rootNodeFromPackage (pkg) { // if the path doesn't exist, then we explode at this point. Note that // this is not a problem for reify(), since it creates the root path // before ever loading trees. // TODO: make buildIdealTree() and loadActual handle a missing root path, // or a symlink to a missing target, and let reify() create it as needed. const real = await realpath(this.path, this[_rpcache], this[_stcache]) const Cls = real === this.path ? 
Node : Link const root = new Cls({ path: this.path, realpath: real, pkg, extraneous: false, dev: false, devOptional: false, peer: false, optional: false, global: this.options.global, installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, loadOverrides: true, }) if (root.isLink) { root.target = new Node({ path: real, realpath: real, pkg, extraneous: false, dev: false, devOptional: false, peer: false, optional: false, global: this.options.global, installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, root, }) } return root } // process the add/rm requests by modifying the root node, and the // update.names request by queueing nodes dependent on those named. async #applyUserRequests (options) { const timeEnd = time.start('idealTree:userRequests') const tree = this.idealTree.target if (!this.options.workspaces.length) { await this.#applyUserRequestsToNode(tree, options) } else { const nodes = this.workspaceNodes(tree, this.options.workspaces) if (this.options.includeWorkspaceRoot) { nodes.push(tree) } const appliedRequests = nodes.map( node => this.#applyUserRequestsToNode(node, options) ) await Promise.all(appliedRequests) } timeEnd() } async #applyUserRequestsToNode (tree, options) { // If we have a list of package names to update, and we know it's // going to update them wherever they are, add any paths into those // named nodes to the buildIdealTree queue. if (!this.options.global && this[_updateNames].length) { this.#queueNamedUpdates() } // global updates only update the globalTop nodes, but we need to know // that they're there, and not reinstall the world unnecessarily. 
const globalExplicitUpdateNames = [] if (this.options.global && (this[_updateAll] || this[_updateNames].length)) { const nm = resolve(this.path, 'node_modules') const paths = await readdirScoped(nm).catch(() => []) for (const p of paths) { const name = p.replace(/\\/g, '/') tree.package.dependencies = tree.package.dependencies || {} const updateName = this[_updateNames].includes(name) if (this[_updateAll] || updateName) { if (updateName) { globalExplicitUpdateNames.push(name) } const dir = resolve(nm, name) const st = await lstat(dir) .catch(/* istanbul ignore next */ () => null) if (st && st.isSymbolicLink()) { const target = await readlink(dir) const real = resolve(dirname(dir), target).replace(/#/g, '%23') tree.package.dependencies[name] = `file:${real}` } else { tree.package.dependencies[name] = '*' } } } } if (this.auditReport && this.auditReport.size > 0) { await this.#queueVulnDependents(options) } const { add, rm } = options if (rm && rm.length) { addRmPkgDeps.rm(tree.package, rm) for (const name of rm) { this.#explicitRequests.add({ from: tree, name, action: 'DELETE' }) } } if (add && add.length) { await this.#add(tree, options) } // triggers a refresh of all edgesOut. this has to be done BEFORE // adding the edges to explicitRequests, because the package setter // resets all edgesOut. if (add && add.length || rm && rm.length || this.options.global) { tree.package = tree.package } for (const spec of this[_resolvedAdd]) { if (spec.tree === tree) { this.#explicitRequests.add(tree.edgesOut.get(spec.name)) } } for (const name of globalExplicitUpdateNames) { this.#explicitRequests.add(tree.edgesOut.get(name)) } this.#depsQueue.push(tree) } // This returns a promise because we might not have the name yet, and need to // call pacote.manifest to find the name. 
async #add (tree, { add, saveType = null, saveBundle = false }) { // If we have a link it will need to be added relative to the target's path const path = tree.target.path // get the name for each of the specs in the list. // ie, doing `foo@bar` we just return foo but if it's a url or git, we // don't know the name until we fetch it and look in its manifest. await Promise.all(add.map(async rawSpec => { // We do NOT provide the path to npa here, because user-additions need to // be resolved relative to the tree being added to. let spec = npa(rawSpec) // if it's just @'' then we reload whatever's there, or get latest // if it's an explicit tag, we need to install that specific tag version const isTag = spec.rawSpec && spec.type === 'tag' // look up the names of file/directory/git specs if (!spec.name || isTag) { const mani = await pacote.manifest(spec, { ...this.options }) if (isTag) { // translate tag to a version spec = npa(`${mani.name}@${mani.version}`) } spec.name = mani.name } const { name } = spec if (spec.type === 'file') { spec = npa(`file:${relpath(path, spec.fetchSpec).replace(/#/g, '%23')}`, path) spec.name = name } else if (spec.type === 'directory') { try { const real = await realpath(spec.fetchSpec, this[_rpcache], this[_stcache]) spec = npa(`file:${relpath(path, real).replace(/#/g, '%23')}`, path) spec.name = name } catch { // TODO: create synthetic test case to simulate realpath failure } } spec.tree = tree this[_resolvedAdd].push(spec) })) // now this._resolvedAdd is a list of spec objects with names. // find a home for each of them! addRmPkgDeps.add({ pkg: tree.package, add: this[_resolvedAdd], saveBundle, saveType, }) } // TODO: provide a way to fix bundled deps by exposing metadata about // what's in the bundle at each published manifest. Without that, we // can't possibly fix bundled deps without breaking a ton of other stuff, // and leaving the user subject to getting it overwritten later anyway. 
async #queueVulnDependents (options) { for (const vuln of this.auditReport.values()) { for (const node of vuln.nodes) { const bundler = node.getBundler() // XXX this belongs in the audit report itself, not here. // We shouldn't even get these things here, and they shouldn't // be printed by npm-audit-report as if they can be fixed, because // they can't. if (bundler) { log.warn(`audit fix ${node.name}@${node.version}`, `${node.location}\nis a bundled dependency of\n${ bundler.name}@${bundler.version} at ${bundler.location}\n` + 'It cannot be fixed automatically.\n' + `Check for updates to the ${bundler.name} package.`) continue } for (const edge of node.edgesIn) { this.addTracker('idealTree', edge.from.name, edge.from.location) this.#depsQueue.push(edge.from) } } } // note any that can't be fixed at the root level without --force // if there's a fix, we use that. otherwise, the user has to remove it, // find a different thing, fix the upstream, etc. // // XXX: how to handle top nodes that aren't the root? Maybe the report // just tells the user to cd into that directory and fix it? if (this.options.force && this.auditReport && this.auditReport.topVulns.size) { options.add = options.add || [] options.rm = options.rm || [] const nodesTouched = new Set() for (const [name, topVuln] of this.auditReport.topVulns.entries()) { const { simpleRange, topNodes, fixAvailable, } = topVuln for (const node of topNodes) { if (!node.isProjectRoot && !node.isWorkspace) { // not something we're going to fix, sorry. have to cd into // that directory and fix it yourself. 
log.warn('audit', 'Manual fix required in linked project ' + `at ./${node.location} for ${name}@${simpleRange}.\n` + `'cd ./${node.location}' and run 'npm audit' for details.`) continue } if (!fixAvailable) { log.warn('audit', `No fix available for ${name}@${simpleRange}`) continue } // name may be different if parent fixes the dep // see Vuln fixAvailable setter const { isSemVerMajor, version, name: fixName } = fixAvailable const breakingMessage = isSemVerMajor ? 'a SemVer major change' : 'outside your stated dependency range' log.warn('audit', `Updating ${fixName} to ${version}, ` + `which is ${breakingMessage}.`) await this.#add(node, { add: [`${fixName}@${version}`] }) nodesTouched.add(node) } } for (const node of nodesTouched) { node.package = node.package } } } #avoidRange (name) { if (!this.auditReport) { return null } const vuln = this.auditReport.get(name) if (!vuln) { return null } return vuln.range } #queueNamedUpdates () { // ignore top nodes, since they are not loaded the same way, and // probably have their own project associated with them. // for every node with one of the names on the list, we add its // dependents to the queue to be evaluated. in buildDepStep, // anything on the update names list will get refreshed, even if // it isn't a problem. // XXX this could be faster by doing a series of inventory.query('name') // calls rather than walking over everything in the tree. 
for (const node of this.idealTree.inventory.values()) { // XXX add any invalid edgesOut to the queue if (this[_updateNames].includes(node.name) && !node.isTop && !node.inDepBundle && !node.inShrinkwrap) { for (const edge of node.edgesIn) { this.addTracker('idealTree', edge.from.name, edge.from.location) this.#depsQueue.push(edge.from) } } } } async #inflateAncientLockfile () { const { meta, inventory } = this.idealTree const ancient = meta.ancientLockfile const old = meta.loadedFromDisk && !(meta.originalLockfileVersion >= 2) if (inventory.size === 0 || !ancient && !old) { return } // if the lockfile is from node v5 or earlier, then we'll have to reload // all the manifests of everything we encounter. this is costly, but at // least it's just a one-time hit. const timeEnd = time.start('idealTree:inflate') // don't warn if we're not gonna actually write it back anyway. const heading = ancient ? 'ancient lockfile' : 'old lockfile' if (ancient || !this.options.lockfileVersion || this.options.lockfileVersion >= defaultLockfileVersion) { log.warn(heading, ` The ${meta.type} file was created with an old version of npm, so supplemental metadata must be fetched from the registry. This is a one-time fix-up, please be patient... `) } this.addTracker('idealTree:inflate') const queue = [] for (const node of inventory.values()) { if (node.isProjectRoot) { continue } // if the node's location isn't within node_modules then this is actually // a link target, so skip it. the link node itself will be queued later. if (!node.location.startsWith('node_modules')) { continue } queue.push(async () => { log.silly('inflate', node.location) const { resolved, version, path, name, location, integrity } = node // don't try to hit the registry for linked deps const useResolved = resolved && ( !version || resolved.startsWith('file:') ) const id = useResolved ? 
resolved : version const spec = npa.resolve(name, id, dirname(path)) const t = `idealTree:inflate:${location}` this.addTracker(t) try { const mani = await pacote.manifest(spec, { ...this.options, resolved: resolved, integrity: integrity, fullMetadata: false, }) node.package = { ...mani, _id: `${mani.name}@${mani.version}` } } catch (er) { const warning = `Could not fetch metadata for ${name}@${id}` log.warn(heading, warning, er) } this.finishTracker(t) }) } await promiseCallLimit(queue) // have to re-calc dep flags, because the nodes don't have edges // until their packages get assigned, so everything looks extraneous calcDepFlags(this.idealTree) // yes, yes, this isn't the "original" version, but now that it's been // upgraded, we need to make sure we don't do the work to upgrade it // again, since it's now as new as can be. if (!this.options.lockfileVersion && !meta.hiddenLockfile) { meta.originalLockfileVersion = defaultLockfileVersion } this.finishTracker('idealTree:inflate') timeEnd() } // at this point we have a virtual tree with the actual root node's // package deps, which may be partly or entirely incomplete, invalid // or extraneous. 
#buildDeps () { const timeEnd = time.start('idealTree:buildDeps') const tree = this.idealTree.target tree.assertRootOverrides() this.#depsQueue.push(tree) // XXX also push anything that depends on a node with a name // in the override list log.silly('idealTree', 'buildDeps') this.addTracker('idealTree', tree.name, '') return this.#buildDepStep().then(timeEnd) } async #buildDepStep () { // removes tracker of previous dependency in the queue if (this.#currentDep) { const { location, name } = this.#currentDep time.end(`idealTree:${location || '#root'}`) this.finishTracker('idealTree', name, location) this.#currentDep = null } if (!this.#depsQueue.length) { return this.#resolveLinks() } const node = this.#depsQueue.pop() const bd = node.package.bundleDependencies const hasBundle = bd && Array.isArray(bd) && bd.length const { hasShrinkwrap } = node // if the node was already visited, or has since been removed from the // tree, skip over it and process the rest of the queue. If a node has // a shrinkwrap, also skip it, because it's going to get its deps // satisfied by whatever's in that file anyway. if (this.#depsSeen.has(node) || node.root !== this.idealTree || hasShrinkwrap && !this.#complete) { return this.#buildDepStep() } this.#depsSeen.add(node) this.#currentDep = node time.start(`idealTree:${node.location || '#root'}`) // if we're loading a _complete_ ideal tree, for a --package-lock-only // installation for example, we have to crack open the tarball and // look inside if it has bundle deps or shrinkwraps. note that this is // not necessary during a reification, because we just update the // ideal tree by reading bundles/shrinkwraps in place. // Don't bother if the node is from the actual tree and hasn't // been resolved, because we can't fetch it anyway, could be anything! 
const crackOpen = this.#complete && node !== this.idealTree && node.resolved && (hasBundle || hasShrinkwrap) if (crackOpen) { const Arborist = this.constructor const opt = { ...this.options } await cacache.tmp.withTmp(this.cache, opt, async path => { await pacote.extract(node.resolved, path, { ...opt, Arborist, resolved: node.resolved, integrity: node.integrity, }) if (hasShrinkwrap) { await new Arborist({ ...this.options, path }) .loadVirtual({ root: node }) } if (hasBundle) { await new Arborist({ ...this.options, path }) .loadActual({ root: node, ignoreMissing: true }) } }) } // if any deps are missing or invalid, then we fetch the manifest for // the thing we want, and build a new dep node from that. // Then, find the ideal placement for that node. The ideal placement // searches from the node's deps (or parent deps in the case of non-root // peer deps), and walks up the tree until it finds the highest spot // where it doesn't cause any conflicts. // // A conflict can be: // - A node by that name already exists at that location. // - The parent has a peer dep on that name // - One of the node's peer deps conflicts at that location, unless the // peer dep is met by a node at that location, which is fine. // // If we create a new node, then build its ideal deps as well. // // Note: this is the same "maximally naive" deduping tree-building // algorithm that npm has used since v3. In a case like this: // // root -> (a@1, b@1||2) // a -> (b@1) // // You'll end up with a tree like this: // // root // +-- a@1 // | +-- b@1 // +-- b@2 // // rather than this, more deduped, but just as correct tree: // // root // +-- a@1 // +-- b@1 // // Another way to look at it is that this algorithm favors getting higher // version deps at higher levels in the tree, even if that reduces // potential deduplication. // // Set `preferDedupe: true` in the options to replace the shallower // dep if allowed. 
const tasks = [] const peerSource = this.#peerSetSource.get(node) || node for (const edge of this.#problemEdges(node)) { if (edge.peerConflicted) { continue } // peerSetSource is only relevant when we have a peerEntryEdge // otherwise we're setting regular non-peer deps as if they have // a virtual root of whatever brought in THIS node. // so we VR the node itself if the edge is not a peer const source = edge.peer ? peerSource : node const virtualRoot = this.#virtualRoot(source, true) // reuse virtual root if we already have one, but don't // try to do the override ahead of time, since we MAY be able // to create a more correct tree than the virtual root could. const vrEdge = virtualRoot && virtualRoot.edgesOut.get(edge.name) const vrDep = vrEdge && vrEdge.valid && vrEdge.to // only re-use the virtualRoot if it's a peer edge we're placing. // otherwise, we end up in situations where we override peer deps that // we could have otherwise found homes for. Eg: // xy -> (x, y) // x -> PEER(z@1) // y -> PEER(z@2) // If xy is a dependency, we can resolve this like: // project // +-- xy // | +-- y // | +-- z@2 // +-- x // +-- z@1 // But if x and y are loaded in the same virtual root, then they will // be forced to agree on a version of z. const required = new Set([edge.from]) const parent = edge.peer ? virtualRoot : null const dep = vrDep && vrDep.satisfies(edge) ? 
vrDep : await this.#nodeFromEdge(edge, parent, null, required) /* istanbul ignore next */ debug(() => { if (!dep) { throw new Error('no dep??') } }) tasks.push({ edge, dep }) } const placeDeps = tasks.sort((a, b) => localeCompare(a.edge.name, b.edge.name)) const promises = [] for (const { edge, dep } of placeDeps) { const pd = new PlaceDep({ edge, dep, auditReport: this.auditReport, explicitRequest: this.#explicitRequests.has(edge), force: this.options.force, installLinks: this.installLinks, installStrategy: this.#installStrategy, legacyPeerDeps: this.legacyPeerDeps, preferDedupe: this.#preferDedupe, strictPeerDeps: this.#strictPeerDeps, updateNames: this[_updateNames], }) // placing a dep is actually a tree of placing the dep itself // and all of its peer group that aren't already met by the tree depth({ tree: pd, getChildren: pd => pd.children, visit: pd => { const { placed, edge, canPlace: cpd } = pd // if we didn't place anything, nothing to do here if (!placed) { return } // we placed something, that means we changed the tree if (placed.errors.length) { this.#loadFailures.add(placed) } this.#mutateTree = true if (cpd.canPlaceSelf === OK) { for (const edgeIn of placed.edgesIn) { if (edgeIn === edge) { continue } const { from, valid, peerConflicted } = edgeIn if (!peerConflicted && !valid && !this.#depsSeen.has(from)) { this.addTracker('idealTree', from.name, from.location) this.#depsQueue.push(edgeIn.from) } } } else { /* istanbul ignore else - should be only OK or REPLACE here */ if (cpd.canPlaceSelf === REPLACE) { // this may also create some invalid edges, for example if we're // intentionally causing something to get nested which was // previously placed in this location. for (const edgeIn of placed.edgesIn) { if (edgeIn === edge) { continue } const { valid, peerConflicted } = edgeIn if (!valid && !peerConflicted) { // if it's already been visited, we have to re-visit // otherwise, just enqueue normally. 
this.#depsSeen.delete(edgeIn.from) this.#depsQueue.push(edgeIn.from) } } } } /* istanbul ignore if - should be impossible */ if (cpd.canPlaceSelf === CONFLICT) { debug(() => { const er = new Error('placed with canPlaceSelf=CONFLICT') throw Object.assign(er, { placeDep: pd }) }) return } // lastly, also check for the missing deps of the node we placed, // and any holes created by pruning out conflicted peer sets. this.#depsQueue.push(placed) for (const dep of pd.needEvaluation) { this.#depsSeen.delete(dep) this.#depsQueue.push(dep) } // pre-fetch any problem edges, since we'll need these soon // if it fails at this point, though, dont' worry because it // may well be an optional dep that has gone missing. it'll // fail later anyway. for (const e of this.#problemEdges(placed)) { // XXX This is somehow load bearing. This makes tests that print // the ideal tree of a tree with tarball dependencies fail. This // can't be changed or removed till we figure out why // The test is named "tarball deps with transitive tarball deps" promises.push(() => this.#fetchManifest(npa.resolve(e.name, e.spec, fromPath(placed, e))) .catch(() => null) ) } }, }) } for (const { to } of node.edgesOut.values()) { if (to && to.isLink && to.target) { this.#linkNodes.add(to) } } await promiseCallLimit(promises) return this.#buildDepStep() } // loads a node from an edge, and then loads its peer deps (and their // peer deps, on down the line) into a virtual root parent. async #nodeFromEdge (edge, parent_, secondEdge, required) { // create a virtual root node with the same deps as the node that // is requesting this one, so that we can get all the peer deps in // a context where they're likely to be resolvable. // Note that the virtual root will also have virtual copies of the // targets of any child Links, so that they resolve appropriately. 
const parent = parent_ || this.#virtualRoot(edge.from) const spec = npa.resolve(edge.name, edge.spec, edge.from.path) const first = await this.#nodeFromSpec(edge.name, spec, parent, edge) // we might have a case where the parent has a peer dependency on // `foo@*` which resolves to v2, but another dep in the set has a // peerDependency on `foo@1`. In that case, if we force it to be v2, // we're unnecessarily triggering an ERESOLVE. // If we have a second edge to worry about, and it's not satisfied // by the first node, try a second and see if that satisfies the // original edge here. const spec2 = secondEdge && npa.resolve( edge.name, secondEdge.spec, secondEdge.from.path ) const second = secondEdge && !secondEdge.valid ? await this.#nodeFromSpec(edge.name, spec2, parent, secondEdge) : null // pick the second one if they're both happy with that, otherwise first const node = second && edge.valid ? second : first // ensure the one we want is the one that's placed node.parent = parent if (required.has(edge.from) && edge.type !== 'peerOptional' || secondEdge && ( required.has(secondEdge.from) && secondEdge.type !== 'peerOptional')) { required.add(node) } // keep track of the thing that caused this node to be included. const src = parent.sourceReference this.#peerSetSource.set(node, src) // do not load the peers along with the set if this is a global top pkg // otherwise we'll be tempted to put peers as other top-level installed // things, potentially clobbering what's there already, which is not // what we want. the missing edges will be picked up on the next pass. if (this.options.global && edge.from.isProjectRoot) { return node } // otherwise, we have to make sure that our peers can go along with us. 
return this.#loadPeerSet(node, required) } #virtualRoot (node, reuse = false) { if (reuse && this.#virtualRoots.has(node)) { return this.#virtualRoots.get(node) } const vr = new Node({ path: node.realpath, sourceReference: node, installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, overrides: node.overrides, }) // also need to set up any targets from any link deps, so that // they are properly reflected in the virtual environment for (const child of node.children.values()) { if (child.isLink) { new Node({ path: child.realpath, sourceReference: child.target, root: vr, }) } } this.#virtualRoots.set(node, vr) return vr } #problemEdges (node) { // skip over any bundled deps, they're not our problem. // Note that this WILL fetch bundled meta-deps which are also dependencies // but not listed as bundled deps. When reifying, we first unpack any // nodes that have bundleDependencies, then do a loadActual on them, move // the nodes into the ideal tree, and then prune. So, fetching those // possibly-bundled meta-deps at this point doesn't cause any worse // problems than a few unnecessary packument fetches. // also skip over any nodes in the tree that failed to load, since those // will crash the install later on anyway. const bd = node.isProjectRoot || node.isWorkspace ? null : node.package.bundleDependencies const bundled = new Set(bd || []) const problems = [] for (const edge of node.edgesOut.values()) { // If it's included in a bundle, we take whatever is specified. if (bundled.has(edge.name)) { continue } // If it's already been logged as a load failure, skip it. if (edge.to && this.#loadFailures.has(edge.to)) { continue } // If it's shrinkwrapped, we use what the shrinkwap wants. if (edge.to && edge.to.inShrinkwrap) { continue } // If the edge has no destination, that's a problem, unless // if it's peerOptional and not explicitly requested. 
if (!edge.to) { if (edge.type !== 'peerOptional' || this.#explicitRequests.has(edge)) { problems.push(edge) } continue } // If the edge has an error, there's a problem. if (!edge.valid) { problems.push(edge) continue } // If the edge is a workspace, and it's valid, leave it alone if (edge.to.isWorkspace) { continue } // user explicitly asked to update this package by name, problem if (this[_updateNames].includes(edge.name)) { problems.push(edge) continue } // fixing a security vulnerability with this package, problem if (this.auditReport && this.auditReport.isVulnerable(edge.to)) { problems.push(edge) continue } // user has explicitly asked to install this package, problem if (this.#explicitRequests.has(edge)) { problems.push(edge) continue } } return problems } async #fetchManifest (spec) { const options = { ...this.options, avoid: this.#avoidRange(spec.name), fullMetadata: true, } // get the intended spec and stored metadata from yarn.lock file, // if available and valid. spec = this.idealTree.meta.checkYarnLock(spec, options) if (this.#manifests.has(spec.raw)) { return this.#manifests.get(spec.raw) } else { log.silly('fetch manifest', spec.raw.replace(spec.rawSpec, redact(spec.rawSpec))) const mani = await pacote.manifest(spec, options) this.#manifests.set(spec.raw, mani) return mani } } #nodeFromSpec (name, spec, parent, edge) { // pacote will slap integrity on its options, so we have to clone // the object so it doesn't get mutated. // Don't bother to load the manifest for link deps, because the target // might be within another package that doesn't exist yet. 
const { installLinks, legacyPeerDeps } = this const isWorkspace = this.idealTree.workspaces && this.idealTree.workspaces.has(spec.name) // spec is a directory, link it unless installLinks is set or it's a workspace // TODO post arborist refactor, will need to check for installStrategy=linked if (spec.type === 'directory' && (isWorkspace || !installLinks)) { return this.#linkFromSpec(name, spec, parent, edge) } // if the spec matches a workspace name, then see if the workspace node will // satisfy the edge. if it does, we return the workspace node to make sure it // takes priority. if (isWorkspace) { const existingNode = this.idealTree.edgesOut.get(spec.name).to if (existingNode && existingNode.isWorkspace && existingNode.satisfies(edge)) { return existingNode } } // spec isn't a directory, and either isn't a workspace or the workspace we have // doesn't satisfy the edge. try to fetch a manifest and build a node from that. return this.#fetchManifest(spec) .then(pkg => new Node({ name, pkg, parent, installLinks, legacyPeerDeps }), error => { error.requiredBy = edge.from.location || '.' // failed to load the spec, either because of enotarget or // fetch failure of some other sort. save it so we can verify // later that it's optional, otherwise the error is fatal. const n = new Node({ name, parent, error, installLinks, legacyPeerDeps, }) this.#loadFailures.add(n) return n }) } #linkFromSpec (name, spec, parent) { const realpath = spec.fetchSpec const { installLinks, legacyPeerDeps } = this return rpj(realpath + '/package.json').catch(() => ({})).then(pkg => { const link = new Link({ name, parent, realpath, pkg, installLinks, legacyPeerDeps }) this.#linkNodes.add(link) return link }) } // load all peer deps and meta-peer deps into the node's parent // At the end of this, the node's peer-type outward edges are all // resolved, and so are all of theirs, but other dep types are not. 
// We prefer to get peer deps that meet the requiring node's dependency, // if possible, since that almost certainly works (since that package was // developed with this set of deps) and will typically be more restrictive. // Note that the peers in the set can conflict either with each other, // or with a direct dependency from the virtual root parent! In strict // mode, this is always an error. In force mode, it never is, and we // prefer the parent's non-peer dep over a peer dep, or the version that // gets placed first. In non-strict mode, we behave strictly if the // virtual root is based on the root project, and allow non-peer parent // deps to override, but throw if no preference can be determined. async #loadPeerSet (node, required) { const peerEdges = [...node.edgesOut.values()] // we typically only install non-optional peers, but we have to // factor them into the peerSet so that we can avoid conflicts .filter(e => e.peer && !(e.valid && e.to)) .sort(({ name: a }, { name: b }) => localeCompare(a, b)) for (const edge of peerEdges) { // already placed this one, and we're happy with it. if (edge.valid && edge.to) { continue } const parentEdge = node.parent.edgesOut.get(edge.name) const { isProjectRoot, isWorkspace } = node.parent.sourceReference const isMine = isProjectRoot || isWorkspace const conflictOK = this.options.force || !isMine && !this.#strictPeerDeps if (!edge.to) { if (!parentEdge) { // easy, just put the thing there await this.#nodeFromEdge(edge, node.parent, null, required) continue } else { // if the parent's edge is very broad like >=1, and the edge in // question is something like 1.x, then we want to get a 1.x, not // a 2.x. pass along the child edge as an advisory guideline. // if the parent edge doesn't satisfy the child edge, and the // child edge doesn't satisfy the parent edge, then we have // a conflict. 
this is always a problem in strict mode, never // in force mode, and a problem in non-strict mode if this isn't // on behalf of our project. in all such cases, we warn at least. const dep = await this.#nodeFromEdge( parentEdge, node.parent, edge, required ) // hooray! that worked! if (edge.valid) { continue } // allow it. either we're overriding, or it's not something // that will be installed by default anyway, and we'll fail when // we get to the point where we need to, if we need to. if (conflictOK || !required.has(dep)) { edge.peerConflicted = true continue } // problem this.#failPeerConflict(edge, parentEdge) } } // There is something present already, and we're not happy about it // See if the thing we WOULD be happy with is also going to satisfy // the other dependents on the current node. const current = edge.to const dep = await this.#nodeFromEdge(edge, null, null, required) if (dep.canReplace(current)) { await this.#nodeFromEdge(edge, node.parent, null, required) continue } // at this point we know that there is a dep there, and // we don't like it. always fail strictly, always allow forcibly or // in non-strict mode if it's not our fault. don't warn here, because // we are going to warn again when we place the deps, if we end up // overriding for something else. If the thing that has this dep // isn't also required, then there's a good chance we won't need it, // so allow it for now and let it conflict if it turns out to actually // be necessary for the installation. 
if (conflictOK || !required.has(edge.from)) { continue } // ok, it's the root, or we're in unforced strict mode, so this is bad this.#failPeerConflict(edge, parentEdge) } return node } #failPeerConflict (edge, currentEdge) { const expl = this.#explainPeerConflict(edge, currentEdge) throw Object.assign(new Error('unable to resolve dependency tree'), expl) } #explainPeerConflict (edge, currentEdge) { const node = edge.from const curNode = node.resolve(edge.name) const current = curNode.explain() return { code: 'ERESOLVE', current, // it SHOULD be impossible to get here without a current node in place, // but this at least gives us something report on when bugs creep into // the tree handling logic. currentEdge: currentEdge ? currentEdge.explain() : null, edge: edge.explain(), strictPeerDeps: this.#strictPeerDeps, force: this.options.force, } } // go through all the links in the this.#linkNodes set // for each one: // - if outside the root, ignore it, assume it's fine, it's not our problem // - if a node in the tree already, assign the target to that node. // - if a path under an existing node, then assign that as the fsParent, // and add it to the _depsQueue // // call buildDepStep if anything was added to the queue, otherwise we're done #resolveLinks () { for (const link of this.#linkNodes) { this.#linkNodes.delete(link) // link we never ended up placing, skip it if (link.root !== this.idealTree) { continue } const tree = this.idealTree.target const external = !link.target.isDescendantOf(tree) // outside the root, somebody else's problem, ignore it if (external && !this.#follow) { continue } // didn't find a parent for it or it has not been seen yet // so go ahead and process it. 
const unseenLink = (link.target.parent || link.target.fsParent) && !this.#depsSeen.has(link.target) if (this.#follow && !link.target.parent && !link.target.fsParent || unseenLink) { this.addTracker('idealTree', link.target.name, link.target.location) this.#depsQueue.push(link.target) } } if (this.#depsQueue.length) { return this.#buildDepStep() } } #fixDepFlags () { const timeEnd = time.start('idealTree:fixDepFlags') const metaFromDisk = this.idealTree.meta.loadedFromDisk const flagsSuspect = this[_flagsSuspect] const mutateTree = this.#mutateTree // if the options set prune:false, then we don't prune, but we still // mark the extraneous items in the tree if we modified it at all. // If we did no modifications, we just iterate over the extraneous nodes. // if we started with an empty tree, then the dep flags are already // all set to true, and there can be nothing extraneous, so there's // nothing to prune, because we built it from scratch. if we didn't // add or remove anything, then also nothing to do. if (metaFromDisk && mutateTree) { resetDepFlags(this.idealTree) } // update all the dev/optional/etc flags in the tree // either we started with a fresh tree, or we // reset all the flags to find the extraneous nodes. // // if we started from a blank slate, or changed something, then // the dep flags will be all set to true. if (!metaFromDisk || mutateTree) { calcDepFlags(this.idealTree) } else { // otherwise just unset all the flags on the root node // since they will sometimes have the default value this.idealTree.extraneous = false this.idealTree.dev = false this.idealTree.optional = false this.idealTree.devOptional = false this.idealTree.peer = false } // at this point, any node marked as extraneous should be pruned. // if we started from a shrinkwrap, and then added/removed something, // then the tree is suspect. Prune what is marked as extraneous. // otherwise, don't bother. 
const needPrune = metaFromDisk && (mutateTree || flagsSuspect) if (this.#prune && needPrune) { this.#idealTreePrune() for (const node of this.idealTree.inventory.values()) { if (node.extraneous) { node.parent = null } } } timeEnd() } #idealTreePrune () { for (const node of this.idealTree.inventory.values()) { if (node.extraneous) { node.parent = null } } } #pruneFailedOptional () { for (const node of this.#loadFailures) { if (!node.optional) { throw node.errors[0] } const set = optionalSet(node) for (const node of set) { node.parent = null } } } async prune (options = {}) { // allow the user to set options on the ctor as well. // XXX: deprecate separate method options objects. options = { ...this.options, ...options } await this.buildIdealTree(options) this.#idealTreePrune() if (!this.options.workspacesEnabled) { const excludeNodes = this.excludeWorkspacesDependencySet(this.idealTree) for (const node of this.idealTree.inventory.values()) { if ( node.parent !== null && !node.isProjectRoot && !excludeNodes.has(node) ) { this[_addNodeToTrashList](node) } } } return this.reify(options) } } PK]�\=����$arborist/lib/case-insensitive-map.jsnu�[���// package children are represented with a Map object, but many file systems // are case-insensitive and unicode-normalizing, so we need to treat // node.children.get('FOO') and node.children.get('foo') as the same thing. module.exports = class CIMap extends Map { #keys = new Map() constructor (items = []) { super() for (const [key, val] of items) { this.set(key, val) } } #normKey (key) { if (typeof key !== 'string') { return key } return key.normalize('NFKD').toLowerCase() } get (key) { const normKey = this.#normKey(key) return this.#keys.has(normKey) ? 
super.get(this.#keys.get(normKey)) : undefined } set (key, val) { const normKey = this.#normKey(key) if (this.#keys.has(normKey)) { super.delete(this.#keys.get(normKey)) } this.#keys.set(normKey, key) return super.set(key, val) } delete (key) { const normKey = this.#normKey(key) if (this.#keys.has(normKey)) { const prevKey = this.#keys.get(normKey) this.#keys.delete(normKey) return super.delete(prevKey) } } has (key) { const normKey = this.#normKey(key) return this.#keys.has(normKey) && super.has(this.#keys.get(normKey)) } } PK]�\�w��IIarborist/lib/optional-set.jsnu�[���// when an optional dep fails to install, we need to remove the branch of the // graph up to the first optionalDependencies, as well as any nodes that are // only required by other nodes in the set. // // This function finds the set of nodes that will need to be removed in that // case. // // Note that this is *only* going to work with trees where calcDepFlags // has been called, because we rely on the node.optional flag. const gatherDepSet = require('./gather-dep-set.js') const optionalSet = node => { if (!node.optional) { return new Set() } // start with the node, then walk up the dependency graph until we // get to the boundaries that define the optional set. since the // node is optional, we know that all paths INTO this area of the // graph are optional, but there may be non-optional dependencies // WITHIN the area. const set = new Set([node]) for (const node of set) { for (const edge of node.edgesIn) { if (!edge.optional) { set.add(edge.from) } } } // now that we've hit the boundary, gather the rest of the nodes in // the optional section. that's the set of dependencies that are only // depended upon by other nodes within the set, or optional dependencies // from outside the set. return gatherDepSet(set, edge => !edge.optional) } module.exports = optionalSet PK]�\��eearborist/lib/signals.jsnu�[���// copied from signal-exit // This is not the set of all possible signals. 
// // It IS, however, the set of all signals that trigger // an exit on either Linux or BSD systems. Linux is a // superset of the signal names supported on BSD, and // the unknown signals just fail to register, so we can // catch that easily enough. // // Don't bother with SIGKILL. It's uncatchable, which // means that we can't fire any callbacks anyway. // // If a user does happen to register a handler on a non- // fatal signal like SIGWINCH or something, and then // exit, it'll end up firing `process.emit('exit')`, so // the handler will be fired anyway. // // SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised // artificially, inherently leave the process in a // state from which it is not safe to try and enter JS // listeners. const platform = global.__ARBORIST_FAKE_PLATFORM__ || process.platform module.exports = [ 'SIGABRT', 'SIGALRM', 'SIGHUP', 'SIGINT', 'SIGTERM', ] if (platform !== 'win32') { module.exports.push( 'SIGVTALRM', 'SIGXCPU', 'SIGXFSZ', 'SIGUSR2', 'SIGTRAP', 'SIGSYS', 'SIGQUIT', 'SIGIOT' // should detect profiler and enable/disable accordingly. // see #21 // 'SIGPROF' ) } if (platform === 'linux') { module.exports.push( 'SIGIO', 'SIGPOLL', 'SIGPWR', 'SIGSTKFLT', 'SIGUNUSED' ) } PK]�\|�<��arborist/lib/relpath.jsnu�[���const { relative } = require('node:path') const relpath = (from, to) => relative(from, to).replace(/\\/g, '/') module.exports = relpath PK]�\��h�=*=*arborist/lib/yarn-lock.jsnu�[���// parse a yarn lock file // basic format // // <request spec>[, <request spec> ...]: // <key> <value> // <subkey>: // <key> <value> // // Assume that any key or value might be quoted, though that's only done // in practice if certain chars are in the string. When writing back, we follow // Yarn's rules for quoting, to cause minimal friction. 
// // The data format would support nested objects, but at this time, it // appears that yarn does not use that for anything, so in the interest // of a simpler parser algorithm, this implementation only supports a // single layer of sub objects. // // This doesn't deterministically define the shape of the tree, and so // cannot be used (on its own) for Arborist.loadVirtual. // But it can give us resolved, integrity, and version, which is useful // for Arborist.loadActual and for building the ideal tree. // // At the very least, when a yarn.lock file is present, we update it // along the way, and save it back in Shrinkwrap.save() // // NIHing this rather than using @yarnpkg/lockfile because that module // is an impenetrable 10kloc of webpack flow output, which is overkill // for something relatively simple and tailored to Arborist's use case. const localeCompare = require('@isaacs/string-locale-compare')('en') const consistentResolve = require('./consistent-resolve.js') const { dirname } = require('node:path') const { breadth } = require('treeverse') // Sort Yarn entries respecting the yarn.lock sort order const yarnEntryPriorities = { name: 1, version: 2, uid: 3, resolved: 4, integrity: 5, registry: 6, dependencies: 7, } const priorityThenLocaleCompare = (a, b) => { if (!yarnEntryPriorities[a] && !yarnEntryPriorities[b]) { return localeCompare(a, b) } /* istanbul ignore next */ return (yarnEntryPriorities[a] || 100) > (yarnEntryPriorities[b] || 100) ? 
1 : -1 } const quoteIfNeeded = val => { if ( typeof val === 'boolean' || typeof val === 'number' || val.startsWith('true') || val.startsWith('false') || /[:\s\n\\",[\]]/g.test(val) || !/^[a-zA-Z]/g.test(val) ) { return JSON.stringify(val) } return val } // sort a key/value object into a string of JSON stringified keys and vals const sortKV = obj => Object.keys(obj) .sort(localeCompare) .map(k => ` ${quoteIfNeeded(k)} ${quoteIfNeeded(obj[k])}`) .join('\n') // for checking against previous entries const match = (p, n) => p.integrity && n.integrity ? p.integrity === n.integrity : p.resolved && n.resolved ? p.resolved === n.resolved : p.version && n.version ? p.version === n.version : true const prefix = `# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. # yarn lockfile v1 ` const nullSymbol = Symbol('null') class YarnLock { static parse (data) { return new YarnLock().parse(data) } static fromTree (tree) { return new YarnLock().fromTree(tree) } constructor () { this.entries = null this.endCurrent() } endCurrent () { this.current = null this.subkey = nullSymbol } parse (data) { const ENTRY_START = /^[^\s].*:$/ const SUBKEY = /^ {2}[^\s]+:$/ const SUBVAL = /^ {4}[^\s]+ .+$/ const METADATA = /^ {2}[^\s]+ .+$/ this.entries = new Map() this.current = null const linere = /([^\r\n]*)\r?\n/gm let match let lineNum = 0 if (!/\n$/.test(data)) { data += '\n' } while (match = linere.exec(data)) { const line = match[1] lineNum++ if (line.charAt(0) === '#') { continue } if (line === '') { this.endCurrent() continue } if (ENTRY_START.test(line)) { this.endCurrent() const specs = this.splitQuoted(line.slice(0, -1), /, */) this.current = new YarnLockEntry(specs) specs.forEach(spec => this.entries.set(spec, this.current)) continue } if (SUBKEY.test(line)) { this.subkey = line.slice(2, -1) this.current[this.subkey] = {} continue } if (SUBVAL.test(line) && this.current && this.current[this.subkey]) { const subval = this.splitQuoted(line.trimLeft(), ' ') if (subval.length 
=== 2) { this.current[this.subkey][subval[0]] = subval[1] continue } } // any other metadata if (METADATA.test(line) && this.current) { const metadata = this.splitQuoted(line.trimLeft(), ' ') if (metadata.length === 2) { // strip off the legacy shasum hashes if (metadata[0] === 'resolved') { metadata[1] = metadata[1].replace(/#.*/, '') } this.current[metadata[0]] = metadata[1] continue } } throw Object.assign(new Error('invalid or corrupted yarn.lock file'), { position: match.index, content: match[0], line: lineNum, }) } this.endCurrent() return this } splitQuoted (str, delim) { // a,"b,c",d"e,f => ['a','"b','c"','d"e','f'] => ['a','b,c','d"e','f'] const split = str.split(delim) const out = [] let o = 0 for (let i = 0; i < split.length; i++) { const chunk = split[i] if (/^".*"$/.test(chunk)) { out[o++] = chunk.trim().slice(1, -1) } else if (/^"/.test(chunk)) { let collect = chunk.trimLeft().slice(1) while (++i < split.length) { const n = split[i] // something that is not a slash, followed by an even number // of slashes then a " then end => ending on an unescaped " if (/[^\\](\\\\)*"$/.test(n)) { collect += n.trimRight().slice(0, -1) break } else { collect += n } } out[o++] = collect } else { out[o++] = chunk.trim() } } return out } toString () { return prefix + [...new Set([...this.entries.values()])] .map(e => e.toString()) .sort((a, b) => localeCompare(a.replace(/"/g, ''), b.replace(/"/g, ''))).join('\n\n') + '\n' } fromTree (tree) { this.entries = new Map() // walk the tree in a deterministic order, breadth-first, alphabetical breadth({ tree, visit: node => this.addEntryFromNode(node), getChildren: node => [...node.children.values(), ...node.fsChildren] .sort((a, b) => a.depth - b.depth || localeCompare(a.name, b.name)), }) return this } addEntryFromNode (node) { const specs = [...node.edgesIn] .map(e => `${node.name}@${e.spec}`) .sort(localeCompare) // Note: // yarn will do excessive duplication in a case like this: // root -> (x@1.x, y@1.x, z@1.x) // y@1.x -> 
(x@1.1, z@2.x) // z@1.x -> () // z@2.x -> (x@1.x) // // where x@1.2 exists, because the "x@1.x" spec will *always* resolve // to x@1.2, which doesn't work for y's dep on x@1.1, so you'll get this: // // root // +-- x@1.2.0 // +-- y // | +-- x@1.1.0 // | +-- z@2 // | +-- x@1.2.0 // +-- z@1 // // instead of this more deduped tree that arborist builds by default: // // root // +-- x@1.2.0 (dep is x@1.x, from root) // +-- y // | +-- x@1.1.0 // | +-- z@2 (dep on x@1.x deduped to x@1.1.0 under y) // +-- z@1 // // In order to not create an invalid yarn.lock file with conflicting // entries, AND not tell yarn to create an invalid tree, we need to // ignore the x@1.x spec coming from z, since it's already in the entries. // // So, if the integrity and resolved don't match a previous entry, skip it. // We call this method on shallower nodes first, so this is fine. const n = this.entryDataFromNode(node) let priorEntry = null const newSpecs = [] for (const s of specs) { const prev = this.entries.get(s) // no previous entry for this spec at all, so it's new if (!prev) { // if we saw a match already, then assign this spec to it as well if (priorEntry) { priorEntry.addSpec(s) } else { newSpecs.push(s) } continue } const m = match(prev, n) // there was a prior entry, but a different thing. skip this one if (!m) { continue } // previous matches, but first time seeing it, so already has this spec. 
// go ahead and add all the previously unseen specs, though if (!priorEntry) { priorEntry = prev for (const s of newSpecs) { priorEntry.addSpec(s) this.entries.set(s, priorEntry) } newSpecs.length = 0 continue } // have a prior entry matching n, and matching the prev we just saw // add the spec to it priorEntry.addSpec(s) this.entries.set(s, priorEntry) } // if we never found a matching prior, then this is a whole new thing if (!priorEntry) { const entry = Object.assign(new YarnLockEntry(newSpecs), n) for (const s of newSpecs) { this.entries.set(s, entry) } } else { // pick up any new info that we got for this node, so that we can // decorate with integrity/resolved/etc. Object.assign(priorEntry, n) } } entryDataFromNode (node) { const n = {} if (node.package.dependencies) { n.dependencies = node.package.dependencies } if (node.package.optionalDependencies) { n.optionalDependencies = node.package.optionalDependencies } if (node.version) { n.version = node.version } if (node.resolved) { n.resolved = consistentResolve( node.resolved, node.isLink ? dirname(node.path) : node.path, node.root.path, true ) } if (node.integrity) { n.integrity = node.integrity } return n } static get Entry () { return YarnLockEntry } } class YarnLockEntry { #specs constructor (specs) { this.#specs = new Set(specs) this.resolved = null this.version = null this.integrity = null this.dependencies = null this.optionalDependencies = null } toString () { // sort objects to the bottom, then alphabetical return ([...this.#specs] .sort(localeCompare) .map(quoteIfNeeded).join(', ') + ':\n' + Object.getOwnPropertyNames(this) .filter(prop => this[prop] !== null) .sort(priorityThenLocaleCompare) .map(prop => typeof this[prop] !== 'object' ? ` ${prop} ${prop === 'integrity' ? this[prop] : JSON.stringify(this[prop])}\n` : Object.keys(this[prop]).length === 0 ? 
'' : ` ${prop}:\n` + sortKV(this[prop]) + '\n') .join('')).trim() } addSpec (spec) { this.#specs.add(spec) } } module.exports = YarnLock PK]�\�ٖ..arborist/lib/tree-check.jsnu�[���const debug = require('./debug.js') const checkTree = (tree, checkUnreachable = true) => { const log = [['START TREE CHECK', tree.path]] // this can only happen in tests where we have a "tree" object // that isn't actually a tree. if (!tree.root || !tree.root.inventory) { return tree } const { inventory } = tree.root const seen = new Set() const check = (node, via = tree, viaType = 'self') => { log.push([ 'CHECK', node && node.location, via && via.location, viaType, 'seen=' + seen.has(node), 'promise=' + !!(node && node.then), 'root=' + !!(node && node.isRoot), ]) if (!node || seen.has(node) || node.then) { return } seen.add(node) if (node.isRoot && node !== tree.root) { throw Object.assign(new Error('double root'), { node: node.path, realpath: node.realpath, tree: tree.path, root: tree.root.path, via: via.path, viaType, log, }) } if (node.root !== tree.root) { throw Object.assign(new Error('node from other root in tree'), { node: node.path, realpath: node.realpath, tree: tree.path, root: tree.root.path, via: via.path, viaType, otherRoot: node.root && node.root.path, log, }) } if (!node.isRoot && node.inventory.size !== 0) { throw Object.assign(new Error('non-root has non-zero inventory'), { node: node.path, tree: tree.path, root: tree.root.path, via: via.path, viaType, inventory: [...node.inventory.values()].map(node => [node.path, node.location]), log, }) } if (!node.isRoot && !inventory.has(node) && !node.dummy) { throw Object.assign(new Error('not in inventory'), { node: node.path, tree: tree.path, root: tree.root.path, via: via.path, viaType, log, }) } const devEdges = [...node.edgesOut.values()].filter(e => e.dev) if (!node.isTop && devEdges.length) { throw Object.assign(new Error('dev edges on non-top node'), { node: node.path, tree: tree.path, root: tree.root.path, via: via.path, 
viaType, devEdges: devEdges.map(e => [e.type, e.name, e.spec, e.error]), log, }) } if (node.path === tree.root.path && node !== tree.root && !tree.root.isLink) { throw Object.assign(new Error('node with same path as root'), { node: node.path, tree: tree.path, root: tree.root.path, via: via.path, viaType, log, }) } if (!node.isLink && node.path !== node.realpath) { throw Object.assign(new Error('non-link with mismatched path/realpath'), { node: node.path, tree: tree.path, realpath: node.realpath, root: tree.root.path, via: via.path, viaType, log, }) } const { parent, fsParent, target } = node check(parent, node, 'parent') check(fsParent, node, 'fsParent') check(target, node, 'target') log.push(['CHILDREN', node.location, ...node.children.keys()]) for (const kid of node.children.values()) { check(kid, node, 'children') } for (const kid of node.fsChildren) { check(kid, node, 'fsChildren') } for (const link of node.linksIn) { check(link, node, 'linksIn') } for (const top of node.tops) { check(top, node, 'tops') } log.push(['DONE', node.location]) } check(tree) if (checkUnreachable) { for (const node of inventory.values()) { if (!seen.has(node) && node !== tree.root) { throw Object.assign(new Error('unreachable in inventory'), { node: node.path, realpath: node.realpath, location: node.location, root: tree.root.path, tree: tree.path, log, }) } } } return tree } // should only ever run this check in debug mode module.exports = tree => tree debug(() => module.exports = checkTree) PK]�\zK�}arborist/lib/index.jsnu�[���module.exports = require('./arborist/index.js') module.exports.Arborist = module.exports module.exports.Node = require('./node.js') module.exports.Link = require('./link.js') module.exports.Edge = require('./edge.js') module.exports.Shrinkwrap = require('./shrinkwrap.js') PK]�\W���FFarborist/lib/vuln.jsnu�[���// An object representing a vulnerability either as the result of an // advisory or due to the package in question depending exclusively on // vulnerable 
versions of a dep. // // - name: package name // - range: Set of vulnerable versions // - nodes: Set of nodes affected // - effects: Set of vulns triggered by this one // - advisories: Set of advisories (including metavulns) causing this vuln. // All of the entries in via are vulnerability objects returned by // @npmcli/metavuln-calculator // - via: dependency vulns which cause this one const { satisfies, simplifyRange } = require('semver') const semverOpt = { loose: true, includePrerelease: true } const localeCompare = require('@isaacs/string-locale-compare')('en') const npa = require('npm-package-arg') const severities = new Map([ ['info', 0], [0, 'info'], ['low', 1], [1, 'low'], ['moderate', 2], [2, 'moderate'], ['high', 3], [3, 'high'], ['critical', 4], [4, 'critical'], [null, -1], [-1, null], ]) class Vuln { #range = null #simpleRange = null // assume a fix is available unless it hits a top node // that locks it in place, setting this false or {isSemVerMajor, version}. #fixAvailable = true constructor ({ name, advisory }) { this.name = name this.via = new Set() this.advisories = new Set() this.severity = null this.effects = new Set() this.topNodes = new Set() this.nodes = new Set() this.addAdvisory(advisory) this.packument = advisory.packument this.versions = advisory.versions } get fixAvailable () { return this.#fixAvailable } set fixAvailable (f) { this.#fixAvailable = f // if there's a fix available for this at the top level, it means that // it will also fix the vulns that led to it being there. to get there, // we set the vias to the most "strict" of fix availables. 
// - false: no fix is available // - {name, version, isSemVerMajor} fix requires -f, is semver major // - {name, version} fix requires -f, not semver major // - true: fix does not require -f // TODO: duped entries may require different fixes but the current // structure does not support this, so the case were a top level fix // corrects a duped entry may mean you have to run fix more than once for (const v of this.via) { // don't blow up on loops if (v.fixAvailable === f) { continue } if (f === false) { v.fixAvailable = f } else if (v.fixAvailable === true) { v.fixAvailable = f } else if (typeof f === 'object' && ( typeof v.fixAvailable !== 'object' || !v.fixAvailable.isSemVerMajor)) { v.fixAvailable = f } } } get isDirect () { for (const node of this.nodes.values()) { for (const edge of node.edgesIn) { if (edge.from.isProjectRoot || edge.from.isWorkspace) { return true } } } return false } testSpec (spec) { const specObj = npa(spec) if (!specObj.registry) { return true } if (specObj.subSpec) { spec = specObj.subSpec.rawSpec } for (const v of this.versions) { if (satisfies(v, spec) && !satisfies(v, this.range, semverOpt)) { return false } } return true } toJSON () { return { name: this.name, severity: this.severity, isDirect: this.isDirect, // just loop over the advisories, since via is only Vuln objects, // and calculated advisories have all the info we need via: [...this.advisories].map(v => v.type === 'metavuln' ? 
v.dependency : { ...v, versions: undefined, vulnerableVersions: undefined, id: undefined, }).sort((a, b) => localeCompare(String(a.source || a), String(b.source || b))), effects: [...this.effects].map(v => v.name).sort(localeCompare), range: this.simpleRange, nodes: [...this.nodes].map(n => n.location).sort(localeCompare), fixAvailable: this.#fixAvailable, } } addVia (v) { this.via.add(v) v.effects.add(this) // call the setter since we might add vias _after_ setting fixAvailable this.fixAvailable = this.fixAvailable } deleteVia (v) { this.via.delete(v) v.effects.delete(this) } deleteAdvisory (advisory) { this.advisories.delete(advisory) // make sure we have the max severity of all the vulns causing this one this.severity = null this.#range = null this.#simpleRange = null // refresh severity for (const advisory of this.advisories) { this.addAdvisory(advisory) } // remove any effects that are no longer relevant const vias = new Set([...this.advisories].map(a => a.dependency)) for (const via of this.via) { if (!vias.has(via.name)) { this.deleteVia(via) } } } addAdvisory (advisory) { this.advisories.add(advisory) const sev = severities.get(advisory.severity) this.#range = null this.#simpleRange = null if (sev > severities.get(this.severity)) { this.severity = advisory.severity } } get range () { if (!this.#range) { this.#range = [...this.advisories].map(v => v.range).join(' || ') } return this.#range } get simpleRange () { if (this.#simpleRange && this.#simpleRange === this.#range) { return this.#simpleRange } const versions = [...this.advisories][0].versions const range = this.range this.#simpleRange = simplifyRange(versions, range, semverOpt) this.#range = this.#simpleRange return this.#simpleRange } isVulnerable (node) { if (this.nodes.has(node)) { return true } const { version } = node.package if (!version) { return false } for (const v of this.advisories) { if (v.testVersion(version)) { this.nodes.add(node) return true } } return false } } module.exports = Vuln 
PK]�\A�A���arborist/lib/inventory.jsnu�[���// a class to manage an inventory and set of indexes of a set of objects based // on specific fields. const { hasOwnProperty } = Object.prototype const debug = require('./debug.js') const keys = ['name', 'license', 'funding', 'realpath', 'packageName'] class Inventory extends Map { #index constructor () { super() this.#index = new Map() for (const key of keys) { this.#index.set(key, new Map()) } } // XXX where is this used? get primaryKey () { return 'location' } // XXX where is this used? get indexes () { return [...keys] } * filter (fn) { for (const node of this.values()) { if (fn(node)) { yield node } } } add (node) { const root = super.get('') if (root && node.root !== root && node.root !== root.root) { debug(() => { throw Object.assign(new Error('adding external node to inventory'), { root: root.path, node: node.path, nodeRoot: node.root.path, }) }) return } const current = super.get(node.location) if (current) { if (current === node) { return } this.delete(current) } super.set(node.location, node) for (const [key, map] of this.#index.entries()) { let val if (hasOwnProperty.call(node, key)) { // if the node has the value, use it even if it's false val = node[key] } else if (key === 'license' && node.package) { // handling for the outdated "licenses" array, just pick the first one // also support the alternative spelling "licence" if (node.package.license) { val = node.package.license } else if (node.package.licence) { val = node.package.licence } else if (Array.isArray(node.package.licenses)) { val = node.package.licenses[0] } else if (Array.isArray(node.package.licences)) { val = node.package.licences[0] } } else if (node[key]) { val = node[key] } else { val = node.package?.[key] } if (val && typeof val === 'object') { // We currently only use license and funding /* istanbul ignore next - not used */ if (key === 'license') { val = val.type } else if (key === 'funding') { val = val.url } } if (!map.has(val)) { 
map.set(val, new Set()) } map.get(val).add(node) } } delete (node) { if (!this.has(node)) { return } super.delete(node.location) for (const [key, map] of this.#index.entries()) { let val if (node[key] !== undefined) { val = node[key] } else { val = node.package?.[key] } const set = map.get(val) if (set) { set.delete(node) if (set.size === 0) { map.delete(node[key]) } } } } query (key, val) { const map = this.#index.get(key) if (arguments.length === 2) { if (map.has(val)) { return map.get(val) } return new Set() } return map.keys() } has (node) { return super.get(node.location) === node } set () { throw new Error('direct set() not supported, use inventory.add(node)') } } module.exports = Inventory PK]�\�vq��arborist/lib/retire-path.jsnu�[���const crypto = require('node:crypto') const { dirname, basename, resolve } = require('node:path') // use sha1 because it's faster, and collisions extremely unlikely anyway const pathSafeHash = s => crypto.createHash('sha1') .update(s) .digest('base64') .replace(/[^a-zA-Z0-9]+/g, '') .slice(0, 8) const retirePath = from => { const d = dirname(from) const b = basename(from) const hash = pathSafeHash(from) return resolve(d, `.${b}-${hash}`) } module.exports = retirePath PK]�\�g<��arborist/lib/edge.jsnu�[���// An edge in the dependency graph // Represents a dependency relationship of some kind const util = require('node:util') const npa = require('npm-package-arg') const depValid = require('./dep-valid.js') class ArboristEdge { constructor (edge) { this.name = edge.name this.spec = edge.spec this.type = edge.type const edgeFrom = edge.from?.location const edgeTo = edge.to?.location const override = edge.overrides?.value if (edgeFrom != null) { this.from = edgeFrom } if (edgeTo) { this.to = edgeTo } if (edge.error) { this.error = edge.error } if (edge.peerConflicted) { this.peerConflicted = true } if (override) { this.overridden = override } } } class Edge { #accept #error #explanation #from #name #spec #to #type static types = 
Object.freeze([ 'prod', 'dev', 'optional', 'peer', 'peerOptional', 'workspace', ]) // XXX where is this used? static errors = Object.freeze([ 'DETACHED', 'MISSING', 'PEER LOCAL', 'INVALID', ]) constructor (options) { const { type, name, spec, accept, from, overrides } = options // XXX are all of these error states even possible? if (typeof spec !== 'string') { throw new TypeError('must provide string spec') } if (!Edge.types.includes(type)) { throw new TypeError(`invalid type: ${type}\n(valid types are: ${Edge.types.join(', ')})`) } if (type === 'workspace' && npa(spec).type !== 'directory') { throw new TypeError('workspace edges must be a symlink') } if (typeof name !== 'string') { throw new TypeError('must provide dependency name') } if (!from) { throw new TypeError('must provide "from" node') } if (accept !== undefined) { if (typeof accept !== 'string') { throw new TypeError('accept field must be a string if provided') } this.#accept = accept || '*' } if (overrides !== undefined) { this.overrides = overrides } this.#name = name this.#type = type this.#spec = spec this.#explanation = null this.#from = from from.edgesOut.get(this.#name)?.detach() from.addEdgeOut(this) this.reload(true) this.peerConflicted = false } satisfiedBy (node) { if (node.name !== this.#name) { return false } // NOTE: this condition means we explicitly do not support overriding // bundled or shrinkwrapped dependencies if (node.hasShrinkwrap || node.inShrinkwrap || node.inBundle) { return depValid(node, this.rawSpec, this.#accept, this.#from) } return depValid(node, this.spec, this.#accept, this.#from) } // return the edge data, and an explanation of how that edge came to be here explain (seen = []) { if (!this.#explanation) { const explanation = { type: this.#type, name: this.#name, spec: this.spec, } if (this.rawSpec !== this.spec) { explanation.rawSpec = this.rawSpec explanation.overridden = true } if (this.bundled) { explanation.bundled = this.bundled } if (this.error) { explanation.error 
= this.error } if (this.#from) { explanation.from = this.#from.explain(null, seen) } this.#explanation = explanation } return this.#explanation } get bundled () { return !!this.#from?.package?.bundleDependencies?.includes(this.#name) } get workspace () { return this.#type === 'workspace' } get prod () { return this.#type === 'prod' } get dev () { return this.#type === 'dev' } get optional () { return this.#type === 'optional' || this.#type === 'peerOptional' } get peer () { return this.#type === 'peer' || this.#type === 'peerOptional' } get type () { return this.#type } get name () { return this.#name } get rawSpec () { return this.#spec } get spec () { if (this.overrides?.value && this.overrides.value !== '*' && this.overrides.name === this.#name) { if (this.overrides.value.startsWith('$')) { const ref = this.overrides.value.slice(1) // we may be a virtual root, if we are we want to resolve reference overrides // from the real root, not the virtual one const pkg = this.#from.sourceReference ? 
this.#from.sourceReference.root.package : this.#from.root.package if (pkg.devDependencies?.[ref]) { return pkg.devDependencies[ref] } if (pkg.optionalDependencies?.[ref]) { return pkg.optionalDependencies[ref] } if (pkg.dependencies?.[ref]) { return pkg.dependencies[ref] } if (pkg.peerDependencies?.[ref]) { return pkg.peerDependencies[ref] } throw new Error(`Unable to resolve reference ${this.overrides.value}`) } return this.overrides.value } return this.#spec } get accept () { return this.#accept } get valid () { return !this.error } get missing () { return this.error === 'MISSING' } get invalid () { return this.error === 'INVALID' } get peerLocal () { return this.error === 'PEER LOCAL' } get error () { if (!this.#error) { if (!this.#to) { if (this.optional) { this.#error = null } else { this.#error = 'MISSING' } } else if (this.peer && this.#from === this.#to.parent && !this.#from.isTop) { this.#error = 'PEER LOCAL' } else if (!this.satisfiedBy(this.#to)) { this.#error = 'INVALID' } else { this.#error = 'OK' } } if (this.#error === 'OK') { return null } return this.#error } reload (hard = false) { this.#explanation = null if (this.#from.overrides) { this.overrides = this.#from.overrides.getEdgeRule(this) } else { delete this.overrides } const newTo = this.#from.resolve(this.#name) if (newTo !== this.#to) { if (this.#to) { this.#to.edgesIn.delete(this) } this.#to = newTo this.#error = null if (this.#to) { this.#to.addEdgeIn(this) } } else if (hard) { this.#error = null } } detach () { this.#explanation = null if (this.#to) { this.#to.edgesIn.delete(this) } this.#from.edgesOut.delete(this.#name) this.#to = null this.#error = 'DETACHED' this.#from = null } get from () { return this.#from } get to () { return this.#to } toJSON () { return new ArboristEdge(this) } [util.inspect.custom] () { return this.toJSON() } } module.exports = Edge PK]�\z��arborist/lib/calc-dep-flags.jsnu�[���const { depth } = require('treeverse') const calcDepFlags = (tree, resetRoot = true) => 
{ if (resetRoot) { tree.dev = false tree.optional = false tree.devOptional = false tree.peer = false } const ret = depth({ tree, visit: node => calcDepFlagsStep(node), filter: node => node, getChildren: (node, tree) => [...tree.edgesOut.values()].map(edge => edge.to), }) return ret } const calcDepFlagsStep = (node) => { // This rewalk is necessary to handle cases where devDep and optional // or normal dependency graphs overlap deep in the dep graph. // Since we're only walking through deps that are not already flagged // as non-dev/non-optional, it's typically a very shallow traversal node.extraneous = false resetParents(node, 'extraneous') resetParents(node, 'dev') resetParents(node, 'peer') resetParents(node, 'devOptional') resetParents(node, 'optional') // for links, map their hierarchy appropriately if (node.isLink) { node.target.dev = node.dev node.target.optional = node.optional node.target.devOptional = node.devOptional node.target.peer = node.peer return calcDepFlagsStep(node.target) } node.edgesOut.forEach(({ peer, optional, dev, to }) => { // if the dep is missing, then its flags are already maximally unset if (!to) { return } // everything with any kind of edge into it is not extraneous to.extraneous = false // devOptional is the *overlap* of the dev and optional tree. // however, for convenience and to save an extra rewalk, we leave // it set when we are in *either* tree, and then omit it from the // package-lock if either dev or optional are set. 
const unsetDevOpt = !node.devOptional && !node.dev && !node.optional && !dev && !optional // if we are not in the devOpt tree, then we're also not in // either the dev or opt trees const unsetDev = unsetDevOpt || !node.dev && !dev const unsetOpt = unsetDevOpt || !node.optional && !optional const unsetPeer = !node.peer && !peer if (unsetPeer) { unsetFlag(to, 'peer') } if (unsetDevOpt) { unsetFlag(to, 'devOptional') } if (unsetDev) { unsetFlag(to, 'dev') } if (unsetOpt) { unsetFlag(to, 'optional') } }) return node } const resetParents = (node, flag) => { if (node[flag]) { return } for (let p = node; p && (p === node || p[flag]); p = p.resolveParent) { p[flag] = false } } // typically a short walk, since it only traverses deps that have the flag set. const unsetFlag = (node, flag) => { if (node[flag]) { node[flag] = false depth({ tree: node, visit: node => { node.extraneous = node[flag] = false if (node.isLink) { node.target.extraneous = node.target[flag] = false } }, getChildren: node => { const children = [] for (const edge of node.target.edgesOut.values()) { if (edge.to && edge.to[flag] && (flag !== 'peer' && edge.type === 'peer' || edge.type === 'prod') ) { children.push(edge.to) } } return children }, }) } } module.exports = calcDepFlags PK]�\#�����arborist/lib/shrinkwrap.jsnu�[���// a module that manages a shrinkwrap file (npm-shrinkwrap.json or // package-lock.json). // Increment whenever the lockfile version updates // v1 - npm <=6 // v2 - arborist v1, npm v7, backwards compatible with v1, add 'packages' // v3 will drop the 'dependencies' field, backwards comp with v2, not v1 // // We cannot bump to v3 until npm v6 is out of common usage, and // definitely not before npm v8. 
const localeCompare = require('@isaacs/string-locale-compare')('en') const defaultLockfileVersion = 3 // for comparing nodes to yarn.lock entries const mismatch = (a, b) => a && b && a !== b // this.tree => the root node for the tree (ie, same path as this) // - Set the first time we do `this.add(node)` for a path matching this.path // // this.add(node) => // - decorate the node with the metadata we have, if we have it, and it matches // - add to the map of nodes needing to be committed, so that subsequent // changes are captured when we commit that location's metadata. // // this.commit() => // - commit all nodes awaiting update to their metadata entries // - re-generate this.data and this.yarnLock based on this.tree // // Note that between this.add() and this.commit(), `this.data` will be out of // date! Always call `commit()` before relying on it. // // After calling this.commit(), any nodes not present in the tree will have // been removed from the shrinkwrap data as well. const { log } = require('proc-log') const YarnLock = require('./yarn-lock.js') const { readFile, readdir, readlink, rm, stat, writeFile, } = require('node:fs/promises') const { resolve, basename, relative } = require('node:path') const specFromLock = require('./spec-from-lock.js') const versionFromTgz = require('./version-from-tgz.js') const npa = require('npm-package-arg') const pkgJson = require('@npmcli/package-json') const parseJSON = require('parse-conflict-json') const stringify = require('json-stringify-nice') const swKeyOrder = [ 'name', 'version', 'lockfileVersion', 'resolved', 'integrity', 'requires', 'packages', 'dependencies', ] // used to rewrite from yarn registry to npm registry const yarnRegRe = /^https?:\/\/registry\.yarnpkg\.com\// const npmRegRe = /^https?:\/\/registry\.npmjs\.org\// // sometimes resolved: is weird or broken, or something npa can't handle const specFromResolved = resolved => { try { return npa(resolved) } catch (er) { return {} } } const relpath = 
require('./relpath.js') const consistentResolve = require('./consistent-resolve.js') const { overrideResolves } = require('./override-resolves.js') const pkgMetaKeys = [ // note: name is included if necessary, for alias packages 'version', 'dependencies', 'peerDependencies', 'peerDependenciesMeta', 'optionalDependencies', 'bundleDependencies', 'acceptDependencies', 'funding', 'engines', 'os', 'cpu', '_integrity', 'license', '_hasShrinkwrap', 'hasInstallScript', 'bin', 'deprecated', 'workspaces', ] const nodeMetaKeys = [ 'integrity', 'inBundle', 'hasShrinkwrap', 'hasInstallScript', ] const metaFieldFromPkg = (pkg, key) => { const val = pkg[key] if (val) { // get only the license type, not the full object if (key === 'license' && typeof val === 'object' && val.type) { return val.type } // skip empty objects and falsey values if (typeof val !== 'object' || Object.keys(val).length) { return val } } return null } // check to make sure that there are no packages newer than or missing from the hidden lockfile const assertNoNewer = async (path, data, lockTime, dir, seen) => { const base = basename(dir) const isNM = dir !== path && base === 'node_modules' const isScope = dir !== path && base.startsWith('@') const isParent = (dir === path) || isNM || isScope const parent = isParent ? 
dir : resolve(dir, 'node_modules') const rel = relpath(path, dir) seen.add(rel) let entries if (dir === path) { entries = [{ name: 'node_modules', isDirectory: () => true }] } else { const { mtime: dirTime } = await stat(dir) if (dirTime > lockTime) { throw new Error(`out of date, updated: ${rel}`) } if (!isScope && !isNM && !data.packages[rel]) { throw new Error(`missing from lockfile: ${rel}`) } entries = await readdir(parent, { withFileTypes: true }).catch(() => []) } // TODO limit concurrency here, this is recursive await Promise.all(entries.map(async dirent => { const child = resolve(parent, dirent.name) if (dirent.isDirectory() && !dirent.name.startsWith('.')) { await assertNoNewer(path, data, lockTime, child, seen) } else if (dirent.isSymbolicLink()) { const target = resolve(parent, await readlink(child)) const tstat = await stat(target).catch( /* istanbul ignore next - windows */ () => null) seen.add(relpath(path, child)) /* istanbul ignore next - windows cannot do this */ if (tstat?.isDirectory() && !seen.has(relpath(path, target))) { await assertNoNewer(path, data, lockTime, target, seen) } } })) if (dir !== path) { return } // assert that all the entries in the lockfile were seen for (const loc in data.packages) { if (!seen.has(loc)) { throw new Error(`missing from node_modules: ${loc}`) } } } class Shrinkwrap { static get defaultLockfileVersion () { return defaultLockfileVersion } static load (options) { return new Shrinkwrap(options).load() } static get keyOrder () { return swKeyOrder } static async reset (options) { // still need to know if it was loaded from the disk, but don't // bother reading it if we're gonna just throw it away. 
const s = new Shrinkwrap(options) s.reset() const [sw, lock] = await s.resetFiles // XXX this is duplicated in this.load(), but using loadFiles instead of resetFiles if (s.hiddenLockfile) { s.filename = resolve(s.path, 'node_modules/.package-lock.json') } else if (s.shrinkwrapOnly || sw) { s.filename = resolve(s.path, 'npm-shrinkwrap.json') } else { s.filename = resolve(s.path, 'package-lock.json') } s.loadedFromDisk = !!(sw || lock) // TODO what uses this? s.type = basename(s.filename) return s } static metaFromNode (node, path, options = {}) { if (node.isLink) { return { resolved: relpath(path, node.realpath), link: true, } } const meta = {} for (const key of pkgMetaKeys) { const val = metaFieldFromPkg(node.package, key) if (val) { meta[key.replace(/^_/, '')] = val } } // we only include name if different from the node path name, and for the // root to help prevent churn based on the name of the directory the // project is in const pname = node.packageName if (pname && (node === node.root || pname !== node.name)) { meta.name = pname } if (node.isTop && node.package.devDependencies) { meta.devDependencies = node.package.devDependencies } for (const key of nodeMetaKeys) { if (node[key]) { meta[key] = node[key] } } const resolved = consistentResolve(node.resolved, node.path, path, true) // hide resolved from registry dependencies. 
if (!resolved) { // no-op } else if (node.isRegistryDependency) { meta.resolved = overrideResolves(resolved, options) } else { meta.resolved = resolved } if (node.extraneous) { meta.extraneous = true } else { if (node.peer) { meta.peer = true } if (node.dev) { meta.dev = true } if (node.optional) { meta.optional = true } if (node.devOptional && !node.dev && !node.optional) { meta.devOptional = true } } return meta } #awaitingUpdate = new Map() constructor (options = {}) { const { path, indent = 2, newline = '\n', shrinkwrapOnly = false, hiddenLockfile = false, lockfileVersion, resolveOptions = {}, } = options if (hiddenLockfile) { this.lockfileVersion = 3 } else if (lockfileVersion) { this.lockfileVersion = parseInt(lockfileVersion, 10) } else { this.lockfileVersion = null } this.tree = null this.path = resolve(path || '.') this.filename = null this.data = null this.indent = indent this.newline = newline this.loadedFromDisk = false this.type = null this.yarnLock = null this.hiddenLockfile = hiddenLockfile this.loadingError = null this.resolveOptions = resolveOptions // only load npm-shrinkwrap.json in dep trees, not package-lock this.shrinkwrapOnly = shrinkwrapOnly } // check to see if a spec is present in the yarn.lock file, and if so, // if we should use it, and what it should resolve to. This is only // done when we did not load a shrinkwrap from disk. Also, decorate // the options object if provided with the resolved and integrity that // we expect. checkYarnLock (spec, options = {}) { spec = npa(spec) const { yarnLock, loadedFromDisk } = this const useYarnLock = yarnLock && !loadedFromDisk const fromYarn = useYarnLock && yarnLock.entries.get(spec.raw) if (fromYarn && fromYarn.version) { // if it's the yarn or npm default registry, use the version as // our effective spec. if it's any other kind of thing, use that. 
const { resolved, version, integrity } = fromYarn const isYarnReg = spec.registry && yarnRegRe.test(resolved) const isnpmReg = spec.registry && !isYarnReg && npmRegRe.test(resolved) const isReg = isnpmReg || isYarnReg // don't use the simple version if the "registry" url is // something else entirely! const tgz = isReg && versionFromTgz(spec.name, resolved) || {} let yspec = resolved if (tgz.name === spec.name && tgz.version === version) { yspec = version } else if (isReg && tgz.name && tgz.version) { yspec = `npm:${tgz.name}@${tgz.version}` } if (yspec) { options.resolved = resolved.replace(yarnRegRe, 'https://registry.npmjs.org/') options.integrity = integrity return npa(`${spec.name}@${yspec}`) } } return spec } // throw away the shrinkwrap data so we can start fresh // still worth doing a load() first so we know which files to write. reset () { this.tree = null this.#awaitingUpdate = new Map() const lockfileVersion = this.lockfileVersion || defaultLockfileVersion this.originalLockfileVersion = lockfileVersion this.data = { lockfileVersion, requires: true, packages: {}, dependencies: {}, } } // files to potentially read from and write to, in order of priority get #filenameSet () { if (this.shrinkwrapOnly) { return [`${this.path}/npm-shrinkwrap.json`] } if (this.hiddenLockfile) { return [`${this.path}/node_modules/.package-lock.json`] } return [ `${this.path}/npm-shrinkwrap.json`, `${this.path}/package-lock.json`, `${this.path}/yarn.lock`, ] } get loadFiles () { return Promise.all( this.#filenameSet.map(file => file && readFile(file, 'utf8').then(d => d, er => { /* istanbul ignore else - can't test without breaking module itself */ if (er.code === 'ENOENT') { return '' } else { throw er } })) ) } get resetFiles () { // slice out yarn, we only care about lock or shrinkwrap when checking // this way, since we're not actually loading the full lock metadata return Promise.all(this.#filenameSet.slice(0, 2) .map(file => file && stat(file).then(st => st.isFile(), er => 
{ /* istanbul ignore else - can't test without breaking module itself */ if (er.code === 'ENOENT') { return null } else { throw er } }) ) ) } inferFormattingOptions (packageJSONData) { const { [Symbol.for('indent')]: indent, [Symbol.for('newline')]: newline, } = packageJSONData if (indent !== undefined) { this.indent = indent } if (newline !== undefined) { this.newline = newline } } async load () { // we don't need to load package-lock.json except for top of tree nodes, // only npm-shrinkwrap.json. let data try { const [sw, lock, yarn] = await this.loadFiles data = sw || lock || '{}' // use shrinkwrap only for deps, otherwise prefer package-lock // and ignore npm-shrinkwrap if both are present. // TODO: emit a warning here or something if both are present. if (this.hiddenLockfile) { this.filename = resolve(this.path, 'node_modules/.package-lock.json') } else if (this.shrinkwrapOnly || sw) { this.filename = resolve(this.path, 'npm-shrinkwrap.json') } else { this.filename = resolve(this.path, 'package-lock.json') } this.type = basename(this.filename) this.loadedFromDisk = Boolean(sw || lock) if (yarn) { this.yarnLock = new YarnLock() // ignore invalid yarn data. we'll likely clobber it later anyway. try { this.yarnLock.parse(yarn) } catch { // ignore errors } } data = parseJSON(data) this.inferFormattingOptions(data) if (this.hiddenLockfile && data.packages) { // add a few ms just to account for jitter const lockTime = +(await stat(this.filename)).mtime + 10 await assertNoNewer(this.path, data, lockTime, this.path, new Set()) } // all good! hidden lockfile is the newest thing in here. 
} catch (er) { /* istanbul ignore else */ if (typeof this.filename === 'string') { const rel = relpath(this.path, this.filename) log.verbose('shrinkwrap', `failed to load ${rel}`, er.message) } else { log.verbose('shrinkwrap', `failed to load ${this.path}`, er.message) } this.loadingError = er this.loadedFromDisk = false this.ancientLockfile = false data = {} } // auto convert v1 lockfiles to v3 // leave v2 in place unless configured // v3 by default let lockfileVersion = defaultLockfileVersion if (this.lockfileVersion) { lockfileVersion = this.lockfileVersion } else if (data.lockfileVersion && data.lockfileVersion !== 1) { lockfileVersion = data.lockfileVersion } this.data = { ...data, lockfileVersion, requires: true, packages: data.packages || {}, dependencies: data.dependencies || {}, } this.originalLockfileVersion = data.lockfileVersion // use default if it wasn't explicitly set, and the current file is // less than our default. otherwise, keep whatever is in the file, // unless we had an explicit setting already. if (!this.lockfileVersion) { this.lockfileVersion = this.data.lockfileVersion = lockfileVersion } this.ancientLockfile = this.loadedFromDisk && !(data.lockfileVersion >= 2) && !data.requires // load old lockfile deps into the packages listing if (data.dependencies && !data.packages) { let pkg try { pkg = await pkgJson.normalize(this.path) pkg = pkg.content } catch { pkg = {} } this.#loadAll('', null, this.data) this.#fixDependencies(pkg) } return this } #loadAll (location, name, lock) { // migrate a v1 package lock to the new format. const meta = this.#metaFromLock(location, name, lock) // dependencies nested under a link are actually under the link target if (meta.link) { location = meta.resolved } if (lock.dependencies) { for (const name in lock.dependencies) { const loc = location + (location ? 
'/' : '') + 'node_modules/' + name this.#loadAll(loc, name, lock.dependencies[name]) } } } // v1 lockfiles track the optional/dev flags, but they don't tell us // which thing had what kind of dep on what other thing, so we need // to correct that now, or every link will be considered prod #fixDependencies (pkg) { // we need the root package.json because legacy shrinkwraps just // have requires:true at the root level, which is even less useful // than merging all dep types into one object. const root = this.data.packages[''] for (const key of pkgMetaKeys) { const val = metaFieldFromPkg(pkg, key) if (val) { root[key.replace(/^_/, '')] = val } } for (const loc in this.data.packages) { const meta = this.data.packages[loc] if (!meta.requires || !loc) { continue } // resolve each require to a meta entry // if this node isn't optional, but the dep is, then it's an optionalDep // likewise for dev deps. // This isn't perfect, but it's a pretty good approximation, and at // least gets us out of having all 'prod' edges, which throws off the // buildIdealTree process for (const name in meta.requires) { const dep = this.#resolveMetaNode(loc, name) // this overwrites the false value set above // default to dependencies if the dep just isn't in the tree, which // maybe should be an error, since it means that the shrinkwrap is // invalid, but we can't do much better without any info. let depType = 'dependencies' /* istanbul ignore else - dev deps are only for the root level */ if (dep?.optional && !meta.optional) { depType = 'optionalDependencies' } else if (dep?.dev && !meta.dev) { // XXX is this even reachable? depType = 'devDependencies' } if (!meta[depType]) { meta[depType] = {} } meta[depType][name] = meta.requires[name] } delete meta.requires } } #resolveMetaNode (loc, name) { for (let path = loc; true; path = path.replace(/(^|\/)[^/]*$/, '')) { const check = `${path}${path ? 
'/' : ''}node_modules/${name}` if (this.data.packages[check]) { return this.data.packages[check] } if (!path) { break } } return null } #lockFromLoc (lock, path, i = 0) { if (!lock) { return null } if (path[i] === '') { i++ } if (i >= path.length) { return lock } if (!lock.dependencies) { return null } return this.#lockFromLoc(lock.dependencies[path[i]], path, i + 1) } // pass in a path relative to the root path, or an absolute path, // get back a /-normalized location based on root path. #pathToLoc (path) { return relpath(this.path, resolve(this.path, path)) } delete (nodePath) { if (!this.data) { throw new Error('run load() before getting or setting data') } const location = this.#pathToLoc(nodePath) this.#awaitingUpdate.delete(location) delete this.data.packages[location] const path = location.split(/(?:^|\/)node_modules\//) const name = path.pop() const pLock = this.#lockFromLoc(this.data, path) if (pLock && pLock.dependencies) { delete pLock.dependencies[name] } } get (nodePath) { if (!this.data) { throw new Error('run load() before getting or setting data') } const location = this.#pathToLoc(nodePath) if (this.#awaitingUpdate.has(location)) { this.#updateWaitingNode(location) } // first try to get from the newer spot, which we know has // all the things we need. if (this.data.packages[location]) { return this.data.packages[location] } // otherwise, fall back to the legacy metadata, and hope for the best // get the node in the shrinkwrap corresponding to this spot const path = location.split(/(?:^|\/)node_modules\//) const name = path[path.length - 1] const lock = this.#lockFromLoc(this.data, path) return this.#metaFromLock(location, name, lock) } #metaFromLock (location, name, lock) { // This function tries as hard as it can to figure out the metadata // from a lockfile which may be outdated or incomplete. Since v1 // lockfiles used the "version" field to contain a variety of // different possible types of data, this gets a little complicated. 
if (!lock) { return {} } // try to figure out a npm-package-arg spec from the lockfile entry // This will return null if we could not get anything valid out of it. const spec = specFromLock(name, lock, this.path) if (spec.type === 'directory') { // the "version" was a file: url to a non-tarball path // this is a symlink dep. We don't store much metadata // about symlinks, just the target. const target = relpath(this.path, spec.fetchSpec) this.data.packages[location] = { link: true, resolved: target, } // also save the link target, omitting version since we don't know // what it is, but we know it isn't a link to itself! if (!this.data.packages[target]) { this.#metaFromLock(target, name, { ...lock, version: null }) } return this.data.packages[location] } const meta = {} // when calling loadAll we'll change these into proper dep objects if (lock.requires && typeof lock.requires === 'object') { meta.requires = lock.requires } if (lock.optional) { meta.optional = true } if (lock.dev) { meta.dev = true } // the root will typically have a name from the root project's // package.json file. if (location === '') { meta.name = lock.name } // if we have integrity, save it now. if (lock.integrity) { meta.integrity = lock.integrity } if (lock.version && !lock.integrity) { // this is usually going to be a git url or symlink, but it could // also be a registry dependency that did not have integrity at // the time it was saved. // Symlinks were already handled above, so that leaves git. // // For git, always save the full SSH url. we'll actually fetch the // tgz most of the time, since it's faster, but it won't work for // private repos, and we can't get back to the ssh from the tgz, // so we store the ssh instead. 
// For unknown git hosts, just resolve to the raw spec in lock.version if (spec.type === 'git') { meta.resolved = consistentResolve(spec, this.path, this.path) // return early because there is nothing else we can do with this return this.data.packages[location] = meta } else if (spec.registry) { // registry dep that didn't save integrity. grab the version, and // fall through to pick up the resolved and potentially name. meta.version = lock.version } // only other possible case is a tarball without integrity. // fall through to do what we can with the filename later. } // at this point, we know that the spec is either a registry dep // (ie, version, because locking, which means a resolved url), // or a remote dep, or file: url. Remote deps and file urls // have a fetchSpec equal to the fully resolved thing. // Registry deps, we take what's in the lockfile. if (lock.resolved || (spec.type && !spec.registry)) { if (spec.registry) { meta.resolved = lock.resolved } else if (spec.type === 'file') { meta.resolved = consistentResolve(spec, this.path, this.path, true) } else if (spec.fetchSpec) { meta.resolved = spec.fetchSpec } } // at this point, if still we don't have a version, do our best to // infer it from the tarball url/file. This works a surprising // amount of the time, even though it's not guaranteed. if (!meta.version) { if (spec.type === 'file' || spec.type === 'remote') { const fromTgz = versionFromTgz(spec.name, spec.fetchSpec) || versionFromTgz(spec.name, meta.resolved) if (fromTgz) { meta.version = fromTgz.version if (fromTgz.name !== name) { meta.name = fromTgz.name } } } else if (spec.type === 'alias') { meta.name = spec.subSpec.name meta.version = spec.subSpec.fetchSpec } else if (spec.type === 'version') { meta.version = spec.fetchSpec } // ok, I did my best! good luck! 
} if (lock.bundled) { meta.inBundle = true } // save it for next time return this.data.packages[location] = meta } add (node) { if (!this.data) { throw new Error('run load() before getting or setting data') } // will be actually updated on read const loc = relpath(this.path, node.path) if (node.path === this.path) { this.tree = node } // if we have metadata about this node, and it's a match, then // try to decorate it. if (node.resolved === null || node.integrity === null) { const { resolved, integrity, hasShrinkwrap, version, } = this.get(node.path) let pathFixed = null if (resolved) { if (!/^file:/.test(resolved)) { pathFixed = resolved } else { pathFixed = `file:${resolve(this.path, resolved.slice(5)).replace(/#/g, '%23')}` } } // if we have one, only set the other if it matches // otherwise it could be for a completely different thing. const resolvedOk = !resolved || !node.resolved || node.resolved === pathFixed const integrityOk = !integrity || !node.integrity || node.integrity === integrity const versionOk = !version || !node.version || version === node.version const allOk = (resolved || integrity || version) && resolvedOk && integrityOk && versionOk if (allOk) { node.resolved = node.resolved || pathFixed || null node.integrity = node.integrity || integrity || null node.hasShrinkwrap = node.hasShrinkwrap || hasShrinkwrap || false } else { // try to read off the package or node itself const { resolved, integrity, hasShrinkwrap, } = Shrinkwrap.metaFromNode(node, this.path, this.resolveOptions) node.resolved = node.resolved || resolved || null node.integrity = node.integrity || integrity || null node.hasShrinkwrap = node.hasShrinkwrap || hasShrinkwrap || false } } this.#awaitingUpdate.set(loc, node) } addEdge (edge) { if (!this.yarnLock || !edge.valid) { return } const { to: node } = edge // if it's already set up, nothing to do if (node.resolved !== null && node.integrity !== null) { return } // if the yarn lock is empty, nothing to do if 
(!this.yarnLock.entries || !this.yarnLock.entries.size) { return } // we relativize the path here because that's how it shows up in the lock // XXX why is this different from pathFixed in this.add?? let pathFixed = null if (node.resolved) { if (!/file:/.test(node.resolved)) { pathFixed = node.resolved } else { pathFixed = consistentResolve(node.resolved, node.path, this.path, true) } } const spec = npa(`${node.name}@${edge.spec}`) const entry = this.yarnLock.entries.get(`${node.name}@${edge.spec}`) if (!entry || mismatch(node.version, entry.version) || mismatch(node.integrity, entry.integrity) || mismatch(pathFixed, entry.resolved)) { return } if (entry.resolved && yarnRegRe.test(entry.resolved) && spec.registry) { entry.resolved = entry.resolved.replace(yarnRegRe, 'https://registry.npmjs.org/') } node.integrity = node.integrity || entry.integrity || null node.resolved = node.resolved || consistentResolve(entry.resolved, this.path, node.path) || null this.#awaitingUpdate.set(relpath(this.path, node.path), node) } #updateWaitingNode (loc) { const node = this.#awaitingUpdate.get(loc) this.#awaitingUpdate.delete(loc) this.data.packages[loc] = Shrinkwrap.metaFromNode( node, this.path, this.resolveOptions) } commit () { if (this.tree) { if (this.yarnLock) { this.yarnLock.fromTree(this.tree) } const root = Shrinkwrap.metaFromNode( this.tree.target, this.path, this.resolveOptions) this.data.packages = {} if (Object.keys(root).length) { this.data.packages[''] = root } for (const node of this.tree.root.inventory.values()) { // only way this.tree is not root is if the root is a link to it if (node === this.tree || node.isRoot || node.location === '') { continue } const loc = relpath(this.path, node.path) this.data.packages[loc] = Shrinkwrap.metaFromNode( node, this.path, this.resolveOptions) } } else if (this.#awaitingUpdate.size > 0) { for (const loc of this.#awaitingUpdate.keys()) { this.#updateWaitingNode(loc) } } // if we haven't set it by now, use the default if 
(!this.lockfileVersion) { this.lockfileVersion = defaultLockfileVersion } this.data.lockfileVersion = this.lockfileVersion // hidden lockfiles don't include legacy metadata or a root entry if (this.hiddenLockfile) { delete this.data.packages[''] delete this.data.dependencies } else if (this.tree && this.lockfileVersion <= 3) { this.#buildLegacyLockfile(this.tree, this.data) } // lf version 1 = dependencies only // lf version 2 = dependencies and packages // lf version 3 = packages only if (this.lockfileVersion >= 3) { const { dependencies, ...data } = this.data return data } else if (this.lockfileVersion < 2) { const { packages, ...data } = this.data return data } else { return { ...this.data } } } #buildLegacyLockfile (node, lock, path = []) { if (node === this.tree) { // the root node lock.name = node.packageName || node.name if (node.version) { lock.version = node.version } } // npm v6 and before tracked 'from', meaning "the request that led // to this package being installed". However, that's inherently // racey and non-deterministic in a world where deps are deduped // ahead of fetch time. In order to maintain backwards compatibility // with v6 in the lockfile, we do this trick where we pick a valid // dep link out of the edgesIn set. Choose the edge with the fewest // number of `node_modules` sections in the requestor path, and then // lexically sort afterwards. 
const edge = [...node.edgesIn].filter(e => e.valid).sort((a, b) => { const aloc = a.from.location.split('node_modules') const bloc = b.from.location.split('node_modules') /* istanbul ignore next - sort calling order is indeterminate */ if (aloc.length > bloc.length) { return 1 } if (bloc.length > aloc.length) { return -1 } return localeCompare(aloc[aloc.length - 1], bloc[bloc.length - 1]) })[0] const res = consistentResolve(node.resolved, this.path, this.path, true) const rSpec = specFromResolved(res) // if we don't have anything (ie, it's extraneous) then use the resolved // value as if that was where we got it from, since at least it's true. // if we don't have either, just an empty object so nothing matches below. // This will effectively just save the version and resolved, as if it's // a standard version/range dep, which is a reasonable default. let spec = rSpec if (edge) { spec = npa.resolve(node.name, edge.spec, edge.from.realpath) } if (node.isLink) { lock.version = `file:${relpath(this.path, node.realpath).replace(/#/g, '%23')}` } else if (spec && (spec.type === 'file' || spec.type === 'remote')) { lock.version = spec.saveSpec } else if (spec && spec.type === 'git' || rSpec.type === 'git') { lock.version = node.resolved /* istanbul ignore else - don't think there are any cases where a git * spec (or indeed, ANY npa spec) doesn't have a .raw member */ if (spec.raw) { lock.from = spec.raw } } else if (!node.isRoot && node.package && node.packageName && node.packageName !== node.name) { lock.version = `npm:${node.packageName}@${node.version}` } else if (node.package && node.version) { lock.version = node.version } if (node.inDepBundle) { lock.bundled = true } // when we didn't resolve to git, file, or dir, and didn't request // git, file, dir, or remote, then the resolved value is necessary. 
if (node.resolved && !node.isLink && rSpec.type !== 'git' && rSpec.type !== 'file' && rSpec.type !== 'directory' && spec.type !== 'directory' && spec.type !== 'git' && spec.type !== 'file' && spec.type !== 'remote') { lock.resolved = overrideResolves(node.resolved, this.resolveOptions) } if (node.integrity) { lock.integrity = node.integrity } if (node.extraneous) { lock.extraneous = true } else if (!node.isLink) { if (node.peer) { lock.peer = true } if (node.devOptional && !node.dev && !node.optional) { lock.devOptional = true } if (node.dev) { lock.dev = true } if (node.optional) { lock.optional = true } } const depender = node.target if (depender.edgesOut.size > 0) { if (node !== this.tree) { const entries = [...depender.edgesOut.entries()] lock.requires = entries.reduce((set, [k, v]) => { // omit peer deps from legacy lockfile requires field, because // npm v6 doesn't handle peer deps, and this triggers some bad // behavior if the dep can't be found in the dependencies list. const { spec, peer } = v if (peer) { return set } if (spec.startsWith('file:')) { // turn absolute file: paths into relative paths from the node // this especially shows up with workspace edges when the root // node is also a workspace in the set. const p = resolve(node.realpath, spec.slice('file:'.length)) set[k] = `file:${relpath(node.realpath, p).replace(/#/g, '%23')}` } else { set[k] = spec } return set }, {}) } else { lock.requires = true } } // now we walk the children, putting them in the 'dependencies' object const { children } = node.target if (!children.size) { delete lock.dependencies } else { const kidPath = [...path, node.realpath] const dependencies = {} // skip any that are already in the descent path, so cyclical link // dependencies don't blow up with ELOOP. 
let found = false for (const [name, kid] of children.entries()) { if (path.includes(kid.realpath)) { continue } dependencies[name] = this.#buildLegacyLockfile(kid, {}, kidPath) found = true } if (found) { lock.dependencies = dependencies } } return lock } toJSON () { if (!this.data) { throw new Error('run load() before getting or setting data') } return this.commit() } toString (options = {}) { const data = this.toJSON() const { format = true } = options const defaultIndent = this.indent || 2 const indent = format === true ? defaultIndent : format || 0 const eol = format ? this.newline || '\n' : '' return stringify(data, swKeyOrder, indent).replace(/\n/g, eol) } save (options = {}) { if (!this.data) { throw new Error('run load() before saving data') } // This must be called before the lockfile conversion check below since it sets properties as part of `commit()` const json = this.toString(options) if ( !this.hiddenLockfile && this.originalLockfileVersion !== undefined && this.originalLockfileVersion !== this.lockfileVersion ) { log.warn( 'shrinkwrap', `Converting lock file (${relative(process.cwd(), this.filename)}) from v${this.originalLockfileVersion} -> v${this.lockfileVersion}` ) } return Promise.all([ writeFile(this.filename, json).catch(er => { if (this.hiddenLockfile) { // well, we did our best. // if we reify, and there's nothing there, then it might be lacking // a node_modules folder, but then the lockfile is not important. // Remove the file, so that in case there WERE deps, but we just // failed to update the file for some reason, it's not out of sync. 
return rm(this.filename, { recursive: true, force: true }) } throw er }), this.yarnLock && this.yarnLock.entries.size && writeFile(this.path + '/yarn.lock', this.yarnLock.toString()), ]) } } module.exports = Shrinkwrap PK]�\�(�~~arborist/lib/reset-dep-flags.jsnu�[���// Sometimes we need to actually do a walk from the root, because you can // have a cycle of deps that all depend on each other, but no path from root. // Also, since the ideal tree is loaded from the shrinkwrap, it had extraneous // flags set false that might now be actually extraneous, and dev/optional // flags that are also now incorrect. This method sets all flags to true, so // we can find the set that is actually extraneous. module.exports = tree => { for (const node of tree.inventory.values()) { node.extraneous = true node.dev = true node.devOptional = true node.peer = true node.optional = true } } PK]�\��5arborist/lib/debug.jsnu�[���// certain assertions we should do only when testing arborist itself, because // they are too expensive or aggressive and would break user programs if we // miss a situation where they are actually valid. // // call like this: // // /* istanbul ignore next - debug check */ // debug(() => { // if (someExpensiveCheck) // throw new Error('expensive check should have returned false') // }) // run in debug mode if explicitly requested, running arborist tests, // or working in the arborist project directory. const debug = process.env.ARBORIST_DEBUG !== '0' && ( process.env.ARBORIST_DEBUG === '1' || /\barborist\b/.test(process.env.NODE_DEBUG || '') || process.env.npm_package_name === '@npmcli/arborist' && ['test', 'snap'].includes(process.env.npm_lifecycle_event) || process.cwd() === require('node:path').resolve(__dirname, '..') ) module.exports = debug ? fn => fn() : () => {} const red = process.stderr.isTTY ? 
msg => `\x1B[31m${msg}\x1B[39m` : m => m module.exports.log = (...msg) => module.exports(() => { const { format } = require('node:util') const prefix = `\n${process.pid} ${red(format(msg.shift()))} ` msg = (prefix + format(...msg).trim().split('\n').join(prefix)).trim() /* eslint-disable-next-line no-console */ console.error(msg) }) PK]�\d�trvvarborist/lib/override-set.jsnu�[���const npa = require('npm-package-arg') const semver = require('semver') class OverrideSet { constructor ({ overrides, key, parent }) { this.parent = parent this.children = new Map() if (typeof overrides === 'string') { overrides = { '.': overrides } } // change a literal empty string to * so we can use truthiness checks on // the value property later if (overrides['.'] === '') { overrides['.'] = '*' } if (parent) { const spec = npa(key) if (!spec.name) { throw new Error(`Override without name: ${key}`) } this.name = spec.name spec.name = '' this.key = key this.keySpec = spec.toString() this.value = overrides['.'] || this.keySpec } for (const [key, childOverrides] of Object.entries(overrides)) { if (key === '.') { continue } const child = new OverrideSet({ parent: this, key, overrides: childOverrides, }) this.children.set(child.key, child) } } getEdgeRule (edge) { for (const rule of this.ruleset.values()) { if (rule.name !== edge.name) { continue } // if keySpec is * we found our override if (rule.keySpec === '*') { return rule } let spec = npa(`${edge.name}@${edge.spec}`) if (spec.type === 'alias') { spec = spec.subSpec } if (spec.type === 'git') { if (spec.gitRange && semver.intersects(spec.gitRange, rule.keySpec)) { return rule } continue } if (spec.type === 'range' || spec.type === 'version') { if (semver.intersects(spec.fetchSpec, rule.keySpec)) { return rule } continue } // if we got this far, the spec type is one of tag, directory or file // which means we have no real way to make version comparisons, so we // just accept the override return rule } return this } getNodeRule (node) { 
for (const rule of this.ruleset.values()) { if (rule.name !== node.name) { continue } if (semver.satisfies(node.version, rule.keySpec) || semver.satisfies(node.version, rule.value)) { return rule } } return this } getMatchingRule (node) { for (const rule of this.ruleset.values()) { if (rule.name !== node.name) { continue } if (semver.satisfies(node.version, rule.keySpec) || semver.satisfies(node.version, rule.value)) { return rule } } return null } * ancestry () { for (let ancestor = this; ancestor; ancestor = ancestor.parent) { yield ancestor } } get isRoot () { return !this.parent } get ruleset () { const ruleset = new Map() for (const override of this.ancestry()) { for (const kid of override.children.values()) { if (!ruleset.has(kid.key)) { ruleset.set(kid.key, kid) } } if (!override.isRoot && !ruleset.has(override.key)) { ruleset.set(override.key, override) } } return ruleset } } module.exports = OverrideSet PK]�\��� ]s]s"arborist/lib/query-selector-all.jsnu�[���'use strict' const { resolve } = require('node:path') const { parser, arrayDelimiter } = require('@npmcli/query') const localeCompare = require('@isaacs/string-locale-compare')('en') const { log } = require('proc-log') const { minimatch } = require('minimatch') const npa = require('npm-package-arg') const pacote = require('pacote') const semver = require('semver') const fetch = require('npm-registry-fetch') // handle results for parsed query asts, results are stored in a map that has a // key that points to each ast selector node and stores the resulting array of // arborist nodes as its value, that is essential to how we handle multiple // query selectors, e.g: `#a, #b, #c` <- 3 diff ast selector nodes class Results { #currentAstSelector #initialItems #inventory #outdatedCache = new Map() #vulnCache #pendingCombinator #results = new Map() #targetNode constructor (opts) { this.#currentAstSelector = opts.rootAstNode.nodes[0] this.#inventory = opts.inventory this.#initialItems = opts.initialItems 
this.#vulnCache = opts.vulnCache this.#targetNode = opts.targetNode this.currentResults = this.#initialItems // We get this when first called and need to pass it to pacote this.flatOptions = opts.flatOptions || {} // reset by rootAstNode walker this.currentAstNode = opts.rootAstNode } get currentResults () { return this.#results.get(this.#currentAstSelector) } set currentResults (value) { this.#results.set(this.#currentAstSelector, value) } // retrieves the initial items to which start the filtering / matching // for most of the different types of recognized ast nodes, e.g: class (aka // depType), id, *, etc in different contexts we need to start with the // current list of filtered results, for example a query for `.workspace` // actually means the same as `*.workspace` so we want to start with the full // inventory if that's the first ast node we're reading but if it appears in // the middle of a query it should respect the previous filtered results, // combinators are a special case in which we always want to have the // complete inventory list in order to use the left-hand side ast node as a // filter combined with the element on its right-hand side get initialItems () { const firstParsed = (this.currentAstNode.parent.nodes[0] === this.currentAstNode) && (this.currentAstNode.parent.parent.type === 'root') if (firstParsed) { return this.#initialItems } if (this.currentAstNode.prev().type === 'combinator') { return this.#inventory } return this.currentResults } // combinators need information about previously filtered items along // with info of the items parsed / retrieved from the selector right // past the combinator, for this reason combinators are stored and // only ran as the last part of each selector logic processPendingCombinator (nextResults) { if (this.#pendingCombinator) { const res = this.#pendingCombinator(this.currentResults, nextResults) this.#pendingCombinator = null this.currentResults = res } else { this.currentResults = nextResults } } // when 
collecting results to a root astNode, we traverse the list of child // selector nodes and collect all of their resulting arborist nodes into a // single/flat Set of items, this ensures we also deduplicate items collect (rootAstNode) { return new Set(rootAstNode.nodes.flatMap(n => this.#results.get(n))) } // selector types map to the '.type' property of the ast nodes via `${astNode.type}Type` // // attribute selector [name=value], etc attributeType () { const nextResults = this.initialItems.filter(node => attributeMatch(this.currentAstNode, node.package) ) this.processPendingCombinator(nextResults) } // dependency type selector (i.e. .prod, .dev, etc) // css calls this class, we interpret is as dependency type classType () { const depTypeFn = depTypes[String(this.currentAstNode)] if (!depTypeFn) { throw Object.assign( new Error(`\`${String(this.currentAstNode)}\` is not a supported dependency type.`), { code: 'EQUERYNODEPTYPE' } ) } const nextResults = depTypeFn(this.initialItems) this.processPendingCombinator(nextResults) } // combinators (i.e. '>', ' ', '~') combinatorType () { this.#pendingCombinator = combinators[String(this.currentAstNode)] } // name selectors (i.e. 
#foo) // css calls this id, we interpret it as name idType () { const name = this.currentAstNode.value const nextResults = this.initialItems.filter(node => (name === node.name) || (name === node.package.name) ) this.processPendingCombinator(nextResults) } // pseudo selectors (prefixed with :) async pseudoType () { const pseudoFn = `${this.currentAstNode.value.slice(1)}Pseudo` if (!this[pseudoFn]) { throw Object.assign( new Error(`\`${this.currentAstNode.value }\` is not a supported pseudo selector.`), { code: 'EQUERYNOPSEUDO' } ) } const nextResults = await this[pseudoFn]() this.processPendingCombinator(nextResults) } selectorType () { this.#currentAstSelector = this.currentAstNode // starts a new array in which resulting items // can be stored for each given ast selector if (!this.currentResults) { this.currentResults = [] } } universalType () { this.processPendingCombinator(this.initialItems) } // pseudo selectors map to the 'value' property of the pseudo selectors in the ast nodes // via selectors via `${value.slice(1)}Pseudo` attrPseudo () { const { lookupProperties, attributeMatcher } = this.currentAstNode return this.initialItems.filter(node => { let objs = [node.package] for (const prop of lookupProperties) { // if an isArray symbol is found that means we'll need to iterate // over the previous found array to basically make sure we traverse // all its indexes testing for possible objects that may eventually // hold more keys specified in a selector if (prop === arrayDelimiter) { objs = objs.flat() continue } // otherwise just maps all currently found objs // to the next prop from the lookup properties list, // filters out any empty key lookup objs = objs.flatMap(obj => obj[prop] || []) // in case there's no property found in the lookup // just filters that item out const noAttr = objs.every(obj => !obj) if (noAttr) { return false } } // if any of the potential object matches // that item should be in the final result return objs.some(obj => 
attributeMatch(attributeMatcher, obj)) }) } emptyPseudo () { return this.initialItems.filter(node => node.edgesOut.size === 0) } extraneousPseudo () { return this.initialItems.filter(node => node.extraneous) } async hasPseudo () { const found = [] for (const item of this.initialItems) { // This is the one time initialItems differs from inventory const res = await retrieveNodesFromParsedAst({ flatOptions: this.flatOptions, initialItems: [item], inventory: this.#inventory, rootAstNode: this.currentAstNode.nestedNode, targetNode: item, vulnCache: this.#vulnCache, }) if (res.size > 0) { found.push(item) } } return found } invalidPseudo () { const found = [] for (const node of this.initialItems) { for (const edge of node.edgesIn) { if (edge.invalid) { found.push(node) break } } } return found } async isPseudo () { const res = await retrieveNodesFromParsedAst({ flatOptions: this.flatOptions, initialItems: this.initialItems, inventory: this.#inventory, rootAstNode: this.currentAstNode.nestedNode, targetNode: this.currentAstNode, vulnCache: this.#vulnCache, }) return [...res] } linkPseudo () { return this.initialItems.filter(node => node.isLink || (node.isTop && !node.isRoot)) } missingPseudo () { return this.#inventory.reduce((res, node) => { for (const edge of node.edgesOut.values()) { if (edge.missing) { const pkg = { name: edge.name, version: edge.spec } const item = new this.#targetNode.constructor({ pkg }) item.queryContext = { missing: true, } item.edgesIn = new Set([edge]) res.push(item) } } return res }, []) } async notPseudo () { const res = await retrieveNodesFromParsedAst({ flatOptions: this.flatOptions, initialItems: this.initialItems, inventory: this.#inventory, rootAstNode: this.currentAstNode.nestedNode, targetNode: this.currentAstNode, vulnCache: this.#vulnCache, }) const internalSelector = new Set(res) return this.initialItems.filter(node => !internalSelector.has(node)) } overriddenPseudo () { return this.initialItems.filter(node => node.overridden) } 
pathPseudo () { return this.initialItems.filter(node => { if (!this.currentAstNode.pathValue) { return true } return minimatch( node.realpath.replace(/\\+/g, '/'), resolve(node.root.realpath, this.currentAstNode.pathValue).replace(/\\+/g, '/') ) }) } privatePseudo () { return this.initialItems.filter(node => node.package.private) } rootPseudo () { return this.initialItems.filter(node => node === this.#targetNode.root) } scopePseudo () { return this.initialItems.filter(node => node === this.#targetNode) } semverPseudo () { const { attributeMatcher, lookupProperties, semverFunc = 'infer', semverValue, } = this.currentAstNode const { qualifiedAttribute } = attributeMatcher if (!semverValue) { // DEPRECATED: remove this warning and throw an error as part of @npmcli/arborist@6 log.warn('query', 'usage of :semver() with no parameters is deprecated') return this.initialItems } if (!semver.valid(semverValue) && !semver.validRange(semverValue)) { throw Object.assign( new Error(`\`${semverValue}\` is not a valid semver version or range`), { code: 'EQUERYINVALIDSEMVER' }) } const valueIsVersion = !!semver.valid(semverValue) const nodeMatches = (node, obj) => { // if we already have an operator, the user provided some test as part of the selector // we evaluate that first because if it fails we don't want this node anyway if (attributeMatcher.operator) { if (!attributeMatch(attributeMatcher, obj)) { // if the initial operator doesn't match, we're done return false } } const attrValue = obj[qualifiedAttribute] // both valid and validRange return null for undefined, so this will skip both nodes that // do not have the attribute defined as well as those where the attribute value is invalid // and those where the value from the package.json is not a string if ((!semver.valid(attrValue) && !semver.validRange(attrValue)) || typeof attrValue !== 'string') { return false } const attrIsVersion = !!semver.valid(attrValue) let actualFunc = semverFunc // if we're asked to infer, we 
examine outputs to make a best guess if (actualFunc === 'infer') { if (valueIsVersion && attrIsVersion) { // two versions -> semver.eq actualFunc = 'eq' } else if (!valueIsVersion && !attrIsVersion) { // two ranges -> semver.intersects actualFunc = 'intersects' } else { // anything else -> semver.satisfies actualFunc = 'satisfies' } } if (['eq', 'neq', 'gt', 'gte', 'lt', 'lte'].includes(actualFunc)) { // both sides must be versions, but one is not if (!valueIsVersion || !attrIsVersion) { return false } return semver[actualFunc](attrValue, semverValue) } else if (['gtr', 'ltr', 'satisfies'].includes(actualFunc)) { // at least one side must be a version, but neither is if (!valueIsVersion && !attrIsVersion) { return false } return valueIsVersion ? semver[actualFunc](semverValue, attrValue) : semver[actualFunc](attrValue, semverValue) } else if (['intersects', 'subset'].includes(actualFunc)) { // these accept two ranges and since a version is also a range, anything goes return semver[actualFunc](attrValue, semverValue) } else { // user provided a function we don't know about, throw an error throw Object.assign(new Error(`\`semver.${actualFunc}\` is not a supported operator.`), { code: 'EQUERYINVALIDOPERATOR' }) } } return this.initialItems.filter((node) => { // no lookupProperties just means its a top level property, see if it matches if (!lookupProperties.length) { return nodeMatches(node, node.package) } // this code is mostly duplicated from attrPseudo to traverse into the package until we get // to our deepest requested object let objs = [node.package] for (const prop of lookupProperties) { if (prop === arrayDelimiter) { objs = objs.flat() continue } objs = objs.flatMap(obj => obj[prop] || []) const noAttr = objs.every(obj => !obj) if (noAttr) { return false } return objs.some(obj => nodeMatches(node, obj)) } }) } typePseudo () { if (!this.currentAstNode.typeValue) { return this.initialItems } return this.initialItems .flatMap(node => { const found = [] for (const 
edge of node.edgesIn) { if (npa(`${edge.name}@${edge.spec}`).type === this.currentAstNode.typeValue) { found.push(edge.to) } } return found }) } dedupedPseudo () { return this.initialItems.filter(node => node.target.edgesIn.size > 1) } async vulnPseudo () { if (!this.initialItems.length) { return this.initialItems } if (!this.#vulnCache) { const packages = {} // We have to map the items twice, once to get the request, and a second time to filter out the results of that request this.initialItems.map((node) => { if (node.isProjectRoot || node.package.private) { return } if (!packages[node.name]) { packages[node.name] = [] } if (!packages[node.name].includes(node.version)) { packages[node.name].push(node.version) } }) const res = await fetch('/-/npm/v1/security/advisories/bulk', { ...this.flatOptions, registry: this.flatOptions.auditRegistry || this.flatOptions.registry, method: 'POST', gzip: true, body: packages, }) this.#vulnCache = await res.json() } const advisories = this.#vulnCache const { vulns } = this.currentAstNode return this.initialItems.filter(item => { const vulnerable = advisories[item.name]?.filter(advisory => { // This could be for another version of this package elsewhere in the tree if (!semver.intersects(advisory.vulnerable_versions, item.version)) { return false } if (!vulns) { return true } // vulns are OR with each other, if any one matches we're done for (const vuln of vulns) { if (vuln.severity && !vuln.severity.includes('*')) { if (!vuln.severity.includes(advisory.severity)) { continue } } if (vuln?.cwe) { // * is special, it means "has a cwe" if (vuln.cwe.includes('*')) { if (!advisory.cwe.length) { continue } } else if (!vuln.cwe.every(cwe => advisory.cwe.includes(`CWE-${cwe}`))) { continue } } return true } }) if (vulnerable?.length) { item.queryContext = { advisories: vulnerable, } return true } return false }) } async outdatedPseudo () { const { outdatedKind = 'any' } = this.currentAstNode // filter the initialItems // NOTE: this uses a 
Promise.all around a map without in-line concurrency handling // since the only async action taken is retrieving the packument, which is limited // based on the max-sockets config in make-fetch-happen const initialResults = await Promise.all(this.initialItems.map(async (node) => { // the root can't be outdated, skip it if (node.isProjectRoot) { return false } // private packages can't be published, skip them if (node.package.private) { return false } // we cache the promise representing the full versions list, this helps reduce the // number of requests we send by keeping population of the cache in a single tick // making it less likely that multiple requests for the same package will be inflight if (!this.#outdatedCache.has(node.name)) { this.#outdatedCache.set(node.name, getPackageVersions(node.name, this.flatOptions)) } const availableVersions = await this.#outdatedCache.get(node.name) // we attach _all_ versions to the queryContext to allow consumers to do their own // filtering and comparisons node.queryContext.versions = availableVersions // next we further reduce the set to versions that are greater than the current one const greaterVersions = availableVersions.filter((available) => { return semver.gt(available, node.version) }) // no newer versions than the current one, drop this node from the result set if (!greaterVersions.length) { return false } // if we got here, we know that newer versions exist, if the kind is 'any' we're done if (outdatedKind === 'any') { return node } // look for newer versions that differ from current by a specific part of the semver version if (['major', 'minor', 'patch'].includes(outdatedKind)) { // filter the versions greater than our current one based on semver.diff const filteredVersions = greaterVersions.filter((version) => { return semver.diff(node.version, version) === outdatedKind }) // no available versions are of the correct diff type if (!filteredVersions.length) { return false } return node } // look for newer 
versions that satisfy at least one edgeIn to this node if (outdatedKind === 'in-range') { const inRangeContext = [] for (const edge of node.edgesIn) { const inRangeVersions = greaterVersions.filter((version) => { return semver.satisfies(version, edge.spec) }) // this edge has no in-range candidates, just move on if (!inRangeVersions.length) { continue } inRangeContext.push({ from: edge.from.location, versions: inRangeVersions, }) } // if we didn't find at least one match, drop this node if (!inRangeContext.length) { return false } // now add to the context each version that is in-range for each edgeIn node.queryContext.outdated = { ...node.queryContext.outdated, inRange: inRangeContext, } return node } // look for newer versions that _do not_ satisfy at least one edgeIn if (outdatedKind === 'out-of-range') { const outOfRangeContext = [] for (const edge of node.edgesIn) { const outOfRangeVersions = greaterVersions.filter((version) => { return !semver.satisfies(version, edge.spec) }) // this edge has no out-of-range candidates, skip it if (!outOfRangeVersions.length) { continue } outOfRangeContext.push({ from: edge.from.location, versions: outOfRangeVersions, }) } // if we didn't add at least one thing to the context, this node is not a match if (!outOfRangeContext.length) { return false } // attach the out-of-range context to the node node.queryContext.outdated = { ...node.queryContext.outdated, outOfRange: outOfRangeContext, } return node } // any other outdatedKind is unknown and will never match return false })) // return an array with the holes for non-matching nodes removed return initialResults.filter(Boolean) } } // operators for attribute selectors const attributeOperators = { // attribute value is equivalent '=' ({ attr, value }) { return attr === value }, // attribute value contains word '~=' ({ attr, value }) { return (attr.match(/\w+/g) || []).includes(value) }, // attribute value contains string '*=' ({ attr, value }) { return attr.includes(value) }, // 
attribute value is equal or starts with '|=' ({ attr, value }) { return attr.startsWith(`${value}-`) }, // attribute value starts with '^=' ({ attr, value }) { return attr.startsWith(value) }, // attribute value ends with '$=' ({ attr, value }) { return attr.endsWith(value) }, } const attributeOperator = ({ attr, value, insensitive, operator }) => { if (typeof attr === 'number') { attr = String(attr) } if (typeof attr !== 'string') { // It's an object or an array, bail return false } if (insensitive) { attr = attr.toLowerCase() } return attributeOperators[operator]({ attr, insensitive, value, }) } const attributeMatch = (matcher, obj) => { const insensitive = !!matcher.insensitive const operator = matcher.operator || '' const attribute = matcher.qualifiedAttribute let value = matcher.value || '' // return early if checking existence if (operator === '') { return Boolean(obj[attribute]) } if (insensitive) { value = value.toLowerCase() } // in case the current object is an array // then we try to match every item in the array if (Array.isArray(obj[attribute])) { return obj[attribute].find((i, index) => { const attr = obj[attribute][index] || '' return attributeOperator({ attr, value, insensitive, operator }) }) } else { const attr = obj[attribute] || '' return attributeOperator({ attr, value, insensitive, operator }) } } const edgeIsType = (node, type, seen = new Set()) => { for (const edgeIn of node.edgesIn) { // TODO Need a test with an infinite loop if (seen.has(edgeIn)) { continue } seen.add(edgeIn) if (edgeIn.type === type || edgeIn.from[type] || edgeIsType(edgeIn.from, type, seen)) { return true } } return false } const filterByType = (nodes, type) => { const found = [] for (const node of nodes) { if (node[type] || edgeIsType(node, type)) { found.push(node) } } return found } const depTypes = { // dependency '.prod' (prevResults) { const found = [] for (const node of prevResults) { if (!node.dev) { found.push(node) } } return found }, // devDependency '.dev' 
(prevResults) { return filterByType(prevResults, 'dev') }, // optionalDependency '.optional' (prevResults) { return filterByType(prevResults, 'optional') }, // peerDependency '.peer' (prevResults) { return filterByType(prevResults, 'peer') }, // workspace '.workspace' (prevResults) { return prevResults.filter(node => node.isWorkspace) }, // bundledDependency '.bundled' (prevResults) { return prevResults.filter(node => node.inBundle) }, } // checks if a given node has a direct parent in any of the nodes provided in // the compare nodes array const hasParent = (node, compareNodes) => { // All it takes is one so we loop and return on the first hit for (let compareNode of compareNodes) { if (compareNode.isLink) { compareNode = compareNode.target } // follows logical parent for link anscestors if (node.isTop && (node.resolveParent === compareNode)) { return true } // follows edges-in to check if they match a possible parent for (const edge of node.edgesIn) { if (edge && edge.from === compareNode) { return true } } } return false } // checks if a given node is a descendant of any of the nodes provided in the // compareNodes array const hasAscendant = (node, compareNodes, seen = new Set()) => { // TODO (future) loop over ancestry property if (hasParent(node, compareNodes)) { return true } if (node.isTop && node.resolveParent) { /* istanbul ignore if - investigate if linksIn check obviates need for this */ if (hasAscendant(node.resolveParent, compareNodes)) { return true } } for (const edge of node.edgesIn) { // TODO Need a test with an infinite loop if (seen.has(edge)) { continue } seen.add(edge) if (edge && edge.from && hasAscendant(edge.from, compareNodes, seen)) { return true } } for (const linkNode of node.linksIn) { if (hasAscendant(linkNode, compareNodes, seen)) { return true } } return false } const combinators = { // direct descendant '>' (prevResults, nextResults) { return nextResults.filter(node => hasParent(node, prevResults)) }, // any descendant ' ' 
(prevResults, nextResults) { return nextResults.filter(node => hasAscendant(node, prevResults)) }, // sibling '~' (prevResults, nextResults) { // Return any node in nextResults that is a sibling of (aka shares a // parent with) a node in prevResults const parentNodes = new Set() // Parents of everything in prevResults for (const node of prevResults) { for (const edge of node.edgesIn) { // edge.from always exists cause it's from another node's edgesIn parentNodes.add(edge.from) } } return nextResults.filter(node => !prevResults.includes(node) && hasParent(node, [...parentNodes]) ) }, } // get a list of available versions of a package filtered to respect --before // NOTE: this runs over each node and should not throw const getPackageVersions = async (name, opts) => { let packument try { packument = await pacote.packument(name, { ...opts, fullMetadata: false, // we only need the corgi }) } catch (err) { // if the fetch fails, log a warning and pretend there are no versions log.warn('query', `could not retrieve packument for ${name}: ${err.message}`) return [] } // start with a sorted list of all versions (lowest first) let candidates = Object.keys(packument.versions).sort(semver.compare) // if the packument has a time property, and the user passed a before flag, then // we filter this list down to only those versions that existed before the specified date if (packument.time && opts.before) { candidates = candidates.filter((version) => { // this version isn't found in the times at all, drop it if (!packument.time[version]) { return false } return Date.parse(packument.time[version]) <= opts.before }) } return candidates } const retrieveNodesFromParsedAst = async (opts) => { // when we first call this it's the parsed query. 
all other times it's // results.currentNode.nestedNode const rootAstNode = opts.rootAstNode if (!rootAstNode.nodes) { return new Set() } const results = new Results(opts) const astNodeQueue = new Set() // walk is sync, so we have to build up our async functions and then await them later rootAstNode.walk((nextAstNode) => { astNodeQueue.add(nextAstNode) }) for (const nextAstNode of astNodeQueue) { // This is the only place we reset currentAstNode results.currentAstNode = nextAstNode const updateFn = `${results.currentAstNode.type}Type` if (typeof results[updateFn] !== 'function') { throw Object.assign( new Error(`\`${results.currentAstNode.type}\` is not a supported selector.`), { code: 'EQUERYNOSELECTOR' } ) } await results[updateFn]() } return results.collect(rootAstNode) } const querySelectorAll = async (targetNode, query, flatOptions) => { // This never changes ever we just pass it around. But we can't scope it to // this whole file if we ever want to support concurrent calls to this // function. const inventory = [...targetNode.root.inventory.values()] // res is a Set of items returned for each parsed css ast selector const res = await retrieveNodesFromParsedAst({ initialItems: inventory, inventory, flatOptions, rootAstNode: parser(query), targetNode, }) // returns nodes ordered by realpath return [...res].sort((a, b) => localeCompare(a.location, b.location)) } module.exports = querySelectorAll PK]�\l��h� � arborist/lib/packument-cache.jsnu�[���const { LRUCache } = require('lru-cache') const { getHeapStatistics } = require('node:v8') const { log } = require('proc-log') // This is an in-memory cache that Pacote uses for packuments. // Packuments are usually cached on disk. This allows for rapid re-requests // of the same packument to bypass disk reads. The tradeoff here is memory // usage for disk reads. 
class PackumentCache extends LRUCache { static #heapLimit = Math.floor(getHeapStatistics().heap_size_limit) #sizeKey #disposed = new Set() #log (...args) { log.silly('packumentCache', ...args) } constructor ({ // How much of this.#heapLimit to take up heapFactor = 0.25, // How much of this.#maxSize we allow any one packument to take up // Anything over this is not cached maxEntryFactor = 0.5, sizeKey = '_contentLength', } = {}) { const maxSize = Math.floor(PackumentCache.#heapLimit * heapFactor) const maxEntrySize = Math.floor(maxSize * maxEntryFactor) super({ maxSize, maxEntrySize, sizeCalculation: (p) => { // Don't cache if we dont know the size // Some versions of pacote set this to `0`, newer versions set it to `null` if (!p[sizeKey]) { return maxEntrySize + 1 } if (p[sizeKey] < 10_000) { return p[sizeKey] * 2 } if (p[sizeKey] < 1_000_000) { return Math.floor(p[sizeKey] * 1.5) } // It is less beneficial to store a small amount of super large things // at the cost of all other packuments. return maxEntrySize + 1 }, dispose: (v, k) => { this.#disposed.add(k) this.#log(k, 'dispose') }, }) this.#sizeKey = sizeKey this.#log(`heap:${PackumentCache.#heapLimit} maxSize:${maxSize} maxEntrySize:${maxEntrySize}`) } set (k, v, ...args) { // we use disposed only for a logging signal if we are setting packuments that // have already been evicted from the cache previously. logging here could help // us tune this in the future. const disposed = this.#disposed.has(k) /* istanbul ignore next - this doesnt happen consistently so hard to test without resorting to unit tests */ if (disposed) { this.#disposed.delete(k) } this.#log(k, 'set', `size:${v[this.#sizeKey]} disposed:${disposed}`) return super.set(k, v, ...args) } has (k, ...args) { const has = super.has(k, ...args) this.#log(k, `cache-${has ? 
'hit' : 'miss'}`) return has } } module.exports = PackumentCache PK]�\֛H H arborist/lib/peer-entry-sets.jsnu�[���// Given a node in a tree, return all of the peer dependency sets that // it is a part of, with the entry (top or non-peer) edges into the sets // identified. // // With this information, we can determine whether it is appropriate to // replace the entire peer set with another (and remove the old one), // push the set deeper into the tree, and so on. // // Returns a Map of { edge => Set(peerNodes) }, const peerEntrySets = node => { // this is the union of all peer groups that the node is a part of // later, we identify all of the entry edges, and create a set of // 1 or more overlapping sets that this node is a part of. const unionSet = new Set([node]) for (const node of unionSet) { for (const edge of node.edgesOut.values()) { if (edge.valid && edge.peer && edge.to) { unionSet.add(edge.to) } } for (const edge of node.edgesIn) { if (edge.valid && edge.peer) { unionSet.add(edge.from) } } } const entrySets = new Map() for (const peer of unionSet) { for (const edge of peer.edgesIn) { // if not valid, it doesn't matter anyway. either it's been previously // peerConflicted, or it's the thing we're interested in replacing. if (!edge.valid) { continue } // this is the entry point into the peer set if (!edge.peer || edge.from.isTop) { // get the subset of peer brought in by this peer entry edge const sub = new Set([peer]) for (const peer of sub) { for (const edge of peer.edgesOut.values()) { if (edge.valid && edge.peer && edge.to) { sub.add(edge.to) } } } // if this subset does not include the node we are focused on, // then it is not relevant for our purposes. Example: // // a -> (b, c, d) // b -> PEER(d) b -> d -> e -> f <-> g // c -> PEER(f, h) c -> (f <-> g, h -> g) // d -> PEER(e) d -> e -> f <-> g // e -> PEER(f) // f -> PEER(g) // g -> PEER(f) // h -> PEER(g) // // The unionSet(e) will include c, but we don't actually care about // it. 
We only expanded to the edge of the peer nodes in order to // find the entry edges that caused the inclusion of peer sets // including (e), so we want: // Map{ // Edge(a->b) => Set(b, d, e, f, g) // Edge(a->d) => Set(d, e, f, g) // } if (sub.has(node)) { entrySets.set(edge, sub) } } } } return entrySets } module.exports = peerEntrySets PK]�\� \SGGarborist/lib/from-path.jsnu�[���// file dependencies need their dependencies resolved based on the location // where the tarball was found, not the location where they end up getting // installed. directory (ie, symlink) deps also need to be resolved based on // their targets, but that's what realpath is const { dirname } = require('node:path') const npa = require('npm-package-arg') const fromPath = (node, edge) => { if (edge && edge.overrides && edge.overrides.name === edge.name && edge.overrides.value) { // fromPath could be called with a node that has a virtual root, if that // happens we want to make sure we get the real root node when overrides // are in use. this is to allow things like overriding a dependency with a // tarball file that's a relative path from the project root if (node.sourceReference) { return node.sourceReference.root.realpath } return node.root.realpath } if (node.resolved) { const spec = npa(node.resolved) if (spec?.type === 'file') { return dirname(spec.fetchSpec) } } return node.realpath } module.exports = fromPath PK]�\h't�� arborist/lib/version-from-tgz.jsnu�[���const semver = require('semver') const { basename } = require('node:path') const { URL } = require('node:url') module.exports = (name, tgz) => { const base = basename(tgz) if (!base.endsWith('.tgz')) { return null } if (tgz.startsWith('http:/') || tgz.startsWith('https:/')) { const u = new URL(tgz) // registry url? check for most likely pattern. // either /@foo/bar/-/bar-1.2.3.tgz or // /foo/-/foo-1.2.3.tgz, and fall through to // basename checking. 
Note that registries can // be mounted below the root url, so /a/b/-/x/y/foo/-/foo-1.2.3.tgz // is a potential option. const tfsplit = u.pathname.slice(1).split('/-/') if (tfsplit.length > 1) { const afterTF = tfsplit.pop() if (afterTF === base) { const pre = tfsplit.pop() const preSplit = pre.split(/\/|%2f/i) const project = preSplit.pop() const scope = preSplit.pop() return versionFromBaseScopeName(base, scope, project) } } } const split = name.split(/\/|%2f/i) const project = split.pop() const scope = split.pop() return versionFromBaseScopeName(base, scope, project) } const versionFromBaseScopeName = (base, scope, name) => { if (!base.startsWith(name + '-')) { return null } const parsed = semver.parse(base.substring(name.length + 1, base.length - 4)) return parsed ? { name: scope && scope.charAt(0) === '@' ? `${scope}/${name}` : name, version: parsed.version, } : null } PK]�\E��#��arborist/lib/node.jsnu�[���// inventory, path, realpath, root, and parent // // node.root is a reference to the root module in the tree (ie, typically the // cwd project folder) // // node.location is the /-delimited path from the root module to the node. In // the case of link targets that may be outside of the root's package tree, // this can include some number of /../ path segments. The location of the // root module is always '.'. node.location thus never contains drive letters // or absolute paths, and is portable within a given project, suitable for // inclusion in lockfiles and metadata. // // node.path is the path to the place where this node lives on disk. It is // system-specific and absolute. // // node.realpath is the path to where the module actually resides on disk. In // the case of non-link nodes, node.realpath is equivalent to node.path. In // the case of link nodes, it is equivalent to node.target.path. 
// // Setting node.parent will set the node's root to the parent's root, as well // as updating edgesIn and edgesOut to reload dependency resolutions as needed, // and setting node.path to parent.path/node_modules/name. // // node.inventory is a Map of name to a Set() of all the nodes under a given // root by that name. It's empty for non-root nodes, and changing the root // reference will remove it from the old root's inventory and add it to the new // one. This map is useful for cases like `npm update foo` or `npm ls foo` // where we need to quickly find all instances of a given package name within a // tree. const semver = require('semver') const nameFromFolder = require('@npmcli/name-from-folder') const Edge = require('./edge.js') const Inventory = require('./inventory.js') const OverrideSet = require('./override-set.js') const { normalize } = require('read-package-json-fast') const { getPaths: getBinPaths } = require('bin-links') const npa = require('npm-package-arg') const debug = require('./debug.js') const gatherDepSet = require('./gather-dep-set.js') const treeCheck = require('./tree-check.js') const { walkUp } = require('walk-up-path') const { resolve, relative, dirname, basename } = require('node:path') const util = require('node:util') const _package = Symbol('_package') const _parent = Symbol('_parent') const _target = Symbol.for('_target') const _fsParent = Symbol('_fsParent') const _reloadNamedEdges = Symbol('_reloadNamedEdges') // overridden by Link class const _loadDeps = Symbol.for('Arborist.Node._loadDeps') const _refreshLocation = Symbol.for('_refreshLocation') const _changePath = Symbol.for('_changePath') // used by Link class as well const _delistFromMeta = Symbol.for('_delistFromMeta') const _explain = Symbol('_explain') const _explanation = Symbol('_explanation') const relpath = require('./relpath.js') const consistentResolve = require('./consistent-resolve.js') const printableTree = require('./printable.js') const CaseInsensitiveMap = 
require('./case-insensitive-map.js') const querySelectorAll = require('./query-selector-all.js') class Node { #global #meta #root #workspaces constructor (options) { // NB: path can be null if it's a link target const { root, path, realpath, parent, error, meta, fsParent, resolved, integrity, // allow setting name explicitly when we haven't set a path yet name, children, fsChildren, installLinks = false, legacyPeerDeps = false, linksIn, isInStore = false, hasShrinkwrap, overrides, loadOverrides = false, extraneous = true, dev = true, optional = true, devOptional = true, peer = true, global = false, dummy = false, sourceReference = null, } = options // this object gives querySelectorAll somewhere to stash context about a node // while processing a query this.queryContext = {} // true if part of a global install this.#global = global this.#workspaces = null this.errors = error ? [error] : [] this.isInStore = isInStore // this will usually be null, except when modeling a // package's dependencies in a virtual root. this.sourceReference = sourceReference // TODO if this came from pacote.manifest we don't have to do this, // we can be told to skip this step const pkg = sourceReference ? sourceReference.package : normalize(options.pkg || {}) this.name = name || nameFromFolder(path || pkg.name || realpath) || pkg.name || null // should be equal if not a link this.path = path ? resolve(path) : null if (!this.name && (!this.path || this.path !== dirname(this.path))) { throw new TypeError('could not detect node name from path or package') } this.realpath = !this.isLink ? this.path : resolve(realpath) this.resolved = resolved || null if (!this.resolved) { // note: this *only* works for non-file: deps, so we avoid even // trying here. // file: deps are tracked in package.json will _resolved set to the // full path to the tarball or link target. However, if the package // is checked into git or moved to another location, that's 100% not // portable at all! 
The _where and _location don't provide much help, // since _location is just where the module ended up in the tree, // and _where can be different than the actual root if it's a // meta-dep deeper in the dependency graph. // // If we don't have the other oldest indicators of legacy npm, then it's // probably what we're getting from pacote, which IS trustworthy. // // Otherwise, hopefully a shrinkwrap will help us out. const resolved = consistentResolve(pkg._resolved) if (resolved && !(/^file:/.test(resolved) && pkg._where)) { this.resolved = resolved } } this.integrity = integrity || pkg._integrity || null this.hasShrinkwrap = hasShrinkwrap || pkg._hasShrinkwrap || false this.installLinks = installLinks this.legacyPeerDeps = legacyPeerDeps this.children = new CaseInsensitiveMap() this.fsChildren = new Set() this.inventory = new Inventory() this.tops = new Set() this.linksIn = new Set(linksIn || []) // these three are set by an Arborist taking a catalog // after the tree is built. We don't get this along the way, // because they have a tendency to change as new children are // added, especially when they're deduped. Eg, a dev dep may be // a 3-levels-deep dependency of a non-dev dep. If we calc the // flags along the way, then they'll tend to be invalid by the // time we need to look at them. if (!dummy) { this.dev = dev this.optional = optional this.devOptional = devOptional this.peer = peer this.extraneous = extraneous this.dummy = false } else { // true if this is a placeholder for the purpose of serving as a // fsParent to link targets that get their deps resolved outside // the root tree folder. this.dummy = true this.dev = false this.optional = false this.devOptional = false this.peer = false this.extraneous = false } this.edgesIn = new Set() this.edgesOut = new CaseInsensitiveMap() // have to set the internal package ref before assigning the parent, // because this.package is read when adding to inventory this[_package] = pkg && typeof pkg === 'object' ? 
pkg : {} if (overrides) { this.overrides = overrides } else if (loadOverrides) { const overrides = this[_package].overrides || {} if (Object.keys(overrides).length > 0) { this.overrides = new OverrideSet({ overrides: this[_package].overrides, }) } } // only relevant for the root and top nodes this.meta = meta // Note: this is _slightly_ less efficient for the initial tree // building than it could be, but in exchange, it's a much simpler // algorithm. // If this node has a bunch of children, and those children satisfy // its various deps, then we're going to _first_ create all the // edges, and _then_ assign the children into place, re-resolving // them all in _reloadNamedEdges. // A more efficient, but more complicated, approach would be to // flag this node as being a part of a tree build, so it could // hold off on resolving its deps until its children are in place. // call the parent setter // Must be set prior to calling _loadDeps, because top-ness is relevant // will also assign root if present on the parent this[_parent] = null this.parent = parent || null this[_fsParent] = null this.fsParent = fsParent || null // see parent/root setters below. // root is set to parent's root if we have a parent, otherwise if it's // null, then it's set to the node itself. 
if (!parent && !fsParent) { this.root = root || null } // mostly a convenience for testing, but also a way to create // trees in a more declarative way than setting parent on each if (children) { for (const c of children) { new Node({ ...c, parent: this }) } } if (fsChildren) { for (const c of fsChildren) { new Node({ ...c, fsParent: this }) } } // now load all the dep edges this[_loadDeps]() } get meta () { return this.#meta } set meta (meta) { this.#meta = meta if (meta) { meta.add(this) } } get global () { if (this.#root === this) { return this.#global } return this.#root.global } // true for packages installed directly in the global node_modules folder get globalTop () { return this.global && this.parent && this.parent.isProjectRoot } get workspaces () { return this.#workspaces } set workspaces (workspaces) { // deletes edges if they already exists if (this.#workspaces) { for (const name of this.#workspaces.keys()) { if (!workspaces.has(name)) { this.edgesOut.get(name).detach() } } } this.#workspaces = workspaces this.#loadWorkspaces() this[_loadDeps]() } get binPaths () { if (!this.parent) { return [] } return getBinPaths({ pkg: this[_package], path: this.path, global: this.global, top: this.globalTop, }) } get hasInstallScript () { const { hasInstallScript, scripts } = this.package const { install, preinstall, postinstall } = scripts || {} return !!(hasInstallScript || install || preinstall || postinstall) } get version () { return this[_package].version || '' } get packageName () { return this[_package].name || null } get pkgid () { const { name = '', version = '' } = this.package // root package will prefer package name over folder name, // and never be called an alias. const { isProjectRoot } = this const myname = isProjectRoot ? name || this.name : this.name const alias = !isProjectRoot && name && myname !== name ? 
`npm:${name}@` : '' return `${myname}@${alias}${version}` } get overridden () { return !!(this.overrides && this.overrides.value && this.overrides.name === this.name) } get package () { return this[_package] } set package (pkg) { // just detach them all. we could make this _slightly_ more efficient // by only detaching the ones that changed, but we'd still have to walk // them all, and the comparison logic gets a bit tricky. we generally // only do this more than once at the root level, so the resolve() calls // are only one level deep, and there's not much to be saved, anyway. // simpler to just toss them all out. for (const edge of this.edgesOut.values()) { edge.detach() } this[_explanation] = null /* istanbul ignore next - should be impossible */ if (!pkg || typeof pkg !== 'object') { debug(() => { throw new Error('setting Node.package to non-object') }) pkg = {} } this[_package] = pkg this.#loadWorkspaces() this[_loadDeps]() // do a hard reload, since the dependents may now be valid or invalid // as a result of the package change. this.edgesIn.forEach(edge => edge.reload(true)) } // node.explain(nodes seen already, edge we're trying to satisfy // if edge is not specified, it lists every edge into the node. explain (edge = null, seen = []) { if (this[_explanation]) { return this[_explanation] } return this[_explanation] = this[_explain](edge, seen) } [_explain] (edge, seen) { if (this.isProjectRoot && !this.sourceReference) { return { location: this.path, } } const why = { name: this.isProjectRoot || this.isTop ? this.packageName : this.name, version: this.package.version, } if (this.errors.length || !this.packageName || !this.package.version) { why.errors = this.errors.length ? 
this.errors : [ new Error('invalid package: lacks name and/or version'), ] why.package = this.package } if (this.root.sourceReference) { const { name, version } = this.root.package why.whileInstalling = { name, version, path: this.root.sourceReference.path, } } if (this.sourceReference) { return this.sourceReference.explain(edge, seen) } if (seen.includes(this)) { return why } why.location = this.location why.isWorkspace = this.isWorkspace // make a new list each time. we can revisit, but not loop. seen = seen.concat(this) why.dependents = [] if (edge) { why.dependents.push(edge.explain(seen)) } else { // ignore invalid edges, since those aren't satisfied by this thing, // and are not keeping it held in this spot anyway. const edges = [] for (const edge of this.edgesIn) { if (!edge.valid && !edge.from.isProjectRoot) { continue } edges.push(edge) } for (const edge of edges) { why.dependents.push(edge.explain(seen)) } } if (this.linksIn.size) { why.linksIn = [...this.linksIn].map(link => link[_explain](edge, seen)) } return why } isDescendantOf (node) { for (let p = this; p; p = p.resolveParent) { if (p === node) { return true } } return false } getBundler (path = []) { // made a cycle, definitely not bundled! if (path.includes(this)) { return null } path.push(this) const parent = this[_parent] if (!parent) { return null } const pBundler = parent.getBundler(path) if (pBundler) { return pBundler } const ppkg = parent.package const bd = ppkg && ppkg.bundleDependencies // explicit bundling if (Array.isArray(bd) && bd.includes(this.name)) { return parent } // deps that are deduped up to the bundling level are bundled. // however, if they get their dep met further up than that, // then they are not bundled. Ie, installing a package with // unmet bundled deps will not cause your deps to be bundled. 
for (const edge of this.edgesIn) { const eBundler = edge.from.getBundler(path) if (!eBundler) { continue } if (eBundler === parent) { return eBundler } } return null } get inBundle () { return !!this.getBundler() } // when reifying, if a package is technically in a bundleDependencies list, // but that list is the root project, we still have to install it. This // getter returns true if it's in a dependency's bundle list, not the root's. get inDepBundle () { const bundler = this.getBundler() return !!bundler && bundler !== this.root } get isWorkspace () { if (this.isProjectRoot) { return false } const { root } = this const { type, to } = root.edgesOut.get(this.packageName) || {} return type === 'workspace' && to && (to.target === this || to === this) } get isRoot () { return this === this.root } get isProjectRoot () { // only treat as project root if it's the actual link that is the root, // or the target of the root link, but NOT if it's another link to the // same root that happens to be somewhere else. return this === this.root || this === this.root.target } get isRegistryDependency () { if (this.edgesIn.size === 0) { return false } for (const edge of this.edgesIn) { if (!npa(edge.spec).registry) { return false } } return true } * ancestry () { for (let anc = this; anc; anc = anc.resolveParent) { yield anc } } set root (root) { // setting to null means this is the new root // should only ever be one step while (root && root.root !== root) { root = root.root } root = root || this // delete from current root inventory this[_delistFromMeta]() // can't set the root (yet) if there's no way to determine location // this allows us to do new Node({...}) and then set the root later. // just make the assignment so we don't lose it, and move on. 
if (!this.path || !root.realpath || !root.path) { this.#root = root return } // temporarily become a root node this.#root = this // break all linksIn, we're going to re-set them if needed later for (const link of this.linksIn) { link[_target] = null this.linksIn.delete(link) } // temporarily break this link as well, we'll re-set if possible later const { target } = this if (this.isLink) { if (target) { target.linksIn.delete(this) if (target.root === this) { target[_delistFromMeta]() } } this[_target] = null } // if this is part of a cascading root set, then don't do this bit // but if the parent/fsParent is in a different set, we have to break // that reference before proceeding if (this.parent && this.parent.root !== root) { this.parent.children.delete(this.name) this[_parent] = null } if (this.fsParent && this.fsParent.root !== root) { this.fsParent.fsChildren.delete(this) this[_fsParent] = null } if (root === this) { this[_refreshLocation]() } else { // setting to some different node. const loc = relpath(root.realpath, this.path) const current = root.inventory.get(loc) // clobber whatever is there now if (current) { current.root = null } this.#root = root // set this.location and add to inventory this[_refreshLocation]() // try to find our parent/fsParent in the new root inventory for (const p of walkUp(dirname(this.path))) { if (p === this.path) { continue } const ploc = relpath(root.realpath, p) const parent = root.inventory.get(ploc) if (parent) { /* istanbul ignore next - impossible */ if (parent.isLink) { debug(() => { throw Object.assign(new Error('assigning parentage to link'), { path: this.path, parent: parent.path, parentReal: parent.realpath, }) }) continue } const childLoc = `${ploc}${ploc ? 
'/' : ''}node_modules/${this.name}` const isParent = this.location === childLoc if (isParent) { const oldChild = parent.children.get(this.name) if (oldChild && oldChild !== this) { oldChild.root = null } if (this.parent) { this.parent.children.delete(this.name) this.parent[_reloadNamedEdges](this.name) } parent.children.set(this.name, this) this[_parent] = parent // don't do it for links, because they don't have a target yet // we'll hit them up a bit later on. if (!this.isLink) { parent[_reloadNamedEdges](this.name) } } else { /* istanbul ignore if - should be impossible, since we break * all fsParent/child relationships when moving? */ if (this.fsParent) { this.fsParent.fsChildren.delete(this) } parent.fsChildren.add(this) this[_fsParent] = parent } break } } // if it doesn't have a parent, it's a top node if (!this.parent) { root.tops.add(this) } else { root.tops.delete(this) } // assign parentage for any nodes that need to have this as a parent // this can happen when we have a node at nm/a/nm/b added *before* // the node at nm/a, which might have the root node as a fsParent. // we can't rely on the public setter here, because it calls into // this function to set up these references! // check dirname so that /foo isn't treated as the fsparent of /foo-bar const nmloc = `${this.location}${this.location ? '/' : ''}node_modules/` // only walk top nodes, since anything else already has a parent. 
for (const child of root.tops) { const isChild = child.location === nmloc + child.name const isFsChild = dirname(child.path).startsWith(this.path) && child !== this && !child.parent && ( !child.fsParent || child.fsParent === this || dirname(this.path).startsWith(child.fsParent.path) ) if (!isChild && !isFsChild) { continue } // set up the internal parentage links if (this.isLink) { child.root = null } else { // can't possibly have a parent, because it's in tops if (child.fsParent) { child.fsParent.fsChildren.delete(child) } child[_fsParent] = null if (isChild) { this.children.set(child.name, child) child[_parent] = this root.tops.delete(child) } else { this.fsChildren.add(child) child[_fsParent] = this } } } // look for any nodes with the same realpath. either they're links // to that realpath, or a thing at that realpath if we're adding a link // (if we're adding a regular node, we already deleted the old one) for (const node of root.inventory.query('realpath', this.realpath)) { if (node === this) { continue } /* istanbul ignore next - should be impossible */ debug(() => { if (node.root !== root) { throw new Error('inventory contains node from other root') } }) if (this.isLink) { const target = node.target this[_target] = target this[_package] = target.package target.linksIn.add(this) // reload edges here, because now we have a target if (this.parent) { this.parent[_reloadNamedEdges](this.name) } break } else { /* istanbul ignore else - should be impossible */ if (node.isLink) { node[_target] = this node[_package] = this.package this.linksIn.add(node) if (node.parent) { node.parent[_reloadNamedEdges](node.name) } } else { debug(() => { throw Object.assign(new Error('duplicate node in root setter'), { path: this.path, realpath: this.realpath, root: root.realpath, }) }) } } } } // reload all edgesIn where the root doesn't match, so we don't have // cross-tree dependency graphs for (const edge of this.edgesIn) { if (edge.from.root !== root) { edge.reload() } } // 
reload all edgesOut where root doens't match, or is missing, since // it might not be missing in the new tree for (const edge of this.edgesOut.values()) { if (!edge.to || edge.to.root !== root) { edge.reload() } } // now make sure our family comes along for the ride! const family = new Set([ ...this.fsChildren, ...this.children.values(), ...this.inventory.values(), ].filter(n => n !== this)) for (const child of family) { if (child.root !== root) { child[_delistFromMeta]() child[_parent] = null this.children.delete(child.name) child[_fsParent] = null this.fsChildren.delete(child) for (const l of child.linksIn) { l[_target] = null child.linksIn.delete(l) } } } for (const child of family) { if (child.root !== root) { child.root = root } } // if we had a target, and didn't find one in the new root, then bring // it over as well, but only if we're setting the link into a new root, // as we don't want to lose the target any time we remove a link. if (this.isLink && target && !this.target && root !== this) { target.root = root } if (!this.overrides && this.parent && this.parent.overrides) { this.overrides = this.parent.overrides.getNodeRule(this) } // tree should always be valid upon root setter completion. treeCheck(this) if (this !== root) { treeCheck(root) } } get root () { return this.#root || this } #loadWorkspaces () { if (!this.#workspaces) { return } for (const [name, path] of this.#workspaces.entries()) { new Edge({ from: this, name, spec: `file:${path.replace(/#/g, '%23')}`, type: 'workspace' }) } } [_loadDeps] () { // Caveat! Order is relevant! // Packages in optionalDependencies are optional. // Packages in both deps and devDeps are required. // Note the subtle breaking change from v6: it is no longer possible // to have a different spec for a devDep than production dep. // Linked targets that are disconnected from the tree are tops, // but don't have a 'path' field, only a 'realpath', because we // don't know their canonical location. 
We don't need their devDeps. const pd = this.package.peerDependencies const ad = this.package.acceptDependencies || {} if (pd && typeof pd === 'object' && !this.legacyPeerDeps) { const pm = this.package.peerDependenciesMeta || {} const peerDependencies = {} const peerOptional = {} for (const [name, dep] of Object.entries(pd)) { if (pm[name]?.optional) { peerOptional[name] = dep } else { peerDependencies[name] = dep } } this.#loadDepType(peerDependencies, 'peer', ad) this.#loadDepType(peerOptional, 'peerOptional', ad) } this.#loadDepType(this.package.dependencies, 'prod', ad) this.#loadDepType(this.package.optionalDependencies, 'optional', ad) const { globalTop, isTop, path, sourceReference } = this const { globalTop: srcGlobalTop, isTop: srcTop, path: srcPath, } = sourceReference || {} const thisDev = isTop && !globalTop && path const srcDev = !sourceReference || srcTop && !srcGlobalTop && srcPath if (thisDev && srcDev) { this.#loadDepType(this.package.devDependencies, 'dev', ad) } } #loadDepType (deps, type, ad) { // Because of the order in which _loadDeps runs, we always want to // prioritize a new edge over an existing one for (const [name, spec] of Object.entries(deps || {})) { const current = this.edgesOut.get(name) if (!current || current.type !== 'workspace') { new Edge({ from: this, name, spec, accept: ad[name], type }) } } } get fsParent () { // in debug setter prevents fsParent from being this return this[_fsParent] } set fsParent (fsParent) { if (!fsParent) { if (this[_fsParent]) { this.root = null } return } debug(() => { if (fsParent === this) { throw new Error('setting node to its own fsParent') } if (fsParent.realpath === this.realpath) { throw new Error('setting fsParent to same path') } // the initial set MUST be an actual walk-up from the realpath // subsequent sets will re-root on the new fsParent's path. 
if (!this[_fsParent] && this.realpath.indexOf(fsParent.realpath) !== 0) { throw Object.assign(new Error('setting fsParent improperly'), { path: this.path, realpath: this.realpath, fsParent: { path: fsParent.path, realpath: fsParent.realpath, }, }) } }) if (fsParent.isLink) { fsParent = fsParent.target } // setting a thing to its own fsParent is not normal, but no-op for safety if (this === fsParent || fsParent.realpath === this.realpath) { return } // nothing to do if (this[_fsParent] === fsParent) { return } const oldFsParent = this[_fsParent] const newPath = !oldFsParent ? this.path : resolve(fsParent.path, relative(oldFsParent.path, this.path)) const nmPath = resolve(fsParent.path, 'node_modules', this.name) // this is actually the parent, set that instead if (newPath === nmPath) { this.parent = fsParent return } const pathChange = newPath !== this.path // remove from old parent/fsParent const oldParent = this.parent const oldName = this.name if (this.parent) { this.parent.children.delete(this.name) this[_parent] = null } if (this.fsParent) { this.fsParent.fsChildren.delete(this) this[_fsParent] = null } // update this.path/realpath for this and all children/fsChildren if (pathChange) { this[_changePath](newPath) } if (oldParent) { oldParent[_reloadNamedEdges](oldName) } // clobbers anything at that path, resets all appropriate references this.root = fsParent.root } // is it safe to replace one node with another? check the edges to // make sure no one will get upset. Note that the node might end up // having its own unmet dependencies, if the new node has new deps. // Note that there are cases where Arborist will opt to insert a node // into the tree even though this function returns false! This is // necessary when a root dependency is added or updated, or when a // root dependency brings peer deps along with it. 
In that case, we // will go ahead and create the invalid state, and then try to resolve // it with more tree construction, because it's a user request. canReplaceWith (node, ignorePeers) { if (node.name !== this.name) { return false } if (node.packageName !== this.packageName) { return false } // XXX need to check for two root nodes? if (node.overrides !== this.overrides) { return false } ignorePeers = new Set(ignorePeers) // gather up all the deps of this node and that are only depended // upon by deps of this node. those ones don't count, since // they'll be replaced if this node is replaced anyway. const depSet = gatherDepSet([this], e => e.to !== this && e.valid) for (const edge of this.edgesIn) { // when replacing peer sets, we need to be able to replace the entire // peer group, which means we ignore incoming edges from other peers // within the replacement set. if (!this.isTop && edge.from.parent === this.parent && edge.peer && ignorePeers.has(edge.from.name)) { continue } // only care about edges that don't originate from this node if (!depSet.has(edge.from) && !edge.satisfiedBy(node)) { return false } } return true } canReplace (node, ignorePeers) { return node.canReplaceWith(this, ignorePeers) } // return true if it's safe to remove this node, because anything that // is depending on it would be fine with the thing that they would resolve // to if it was removed, or nothing is depending on it in the first place. 
canDedupe (preferDedupe = false) { // not allowed to mess with shrinkwraps or bundles if (this.inDepBundle || this.inShrinkwrap) { return false } // it's a top level pkg, or a dep of one if (!this.resolveParent || !this.resolveParent.resolveParent) { return false } // no one wants it, remove it if (this.edgesIn.size === 0) { return true } const other = this.resolveParent.resolveParent.resolve(this.name) // nothing else, need this one if (!other) { return false } // if it's the same thing, then always fine to remove if (other.matches(this)) { return true } // if the other thing can't replace this, then skip it if (!other.canReplace(this)) { return false } // if we prefer dedupe, or if the version is greater/equal, take the other if (preferDedupe || semver.gte(other.version, this.version)) { return true } return false } satisfies (requested) { if (requested instanceof Edge) { return this.name === requested.name && requested.satisfiedBy(this) } const parsed = npa(requested) const { name = this.name, rawSpec: spec } = parsed return this.name === name && this.satisfies(new Edge({ from: new Node({ path: this.root.realpath }), type: 'prod', name, spec, })) } matches (node) { // if the nodes are literally the same object, obviously a match. if (node === this) { return true } // if the names don't match, they're different things, even if // the package contents are identical. if (node.name !== this.name) { return false } // if they're links, they match if the targets match if (this.isLink) { return node.isLink && this.target.matches(node.target) } // if they're two project root nodes, they're different if the paths differ if (this.isProjectRoot && node.isProjectRoot) { return this.path === node.path } // if the integrity matches, then they're the same. 
if (this.integrity && node.integrity) { return this.integrity === node.integrity } // if no integrity, check resolved if (this.resolved && node.resolved) { return this.resolved === node.resolved } // if no resolved, check both package name and version // otherwise, conclude that they are different things return this.packageName && node.packageName && this.packageName === node.packageName && this.version && node.version && this.version === node.version } // replace this node with the supplied argument // Useful when mutating an ideal tree, so we can avoid having to call // the parent/root setters more than necessary. replaceWith (node) { node.replace(this) } replace (node) { this[_delistFromMeta]() // if the name matches, but is not identical, we are intending to clobber // something case-insensitively, so merely setting name and path won't // have the desired effect. just set the path so it'll collide in the // parent's children map, and leave it at that. if (node.parent?.children.get(this.name) === node) { this.path = resolve(node.parent.path, 'node_modules', this.name) } else { this.path = node.path this.name = node.name } if (!this.isLink) { this.realpath = this.path } this[_refreshLocation]() // keep children when a node replaces another if (!this.isLink) { for (const kid of node.children.values()) { kid.parent = this } if (node.isLink && node.target) { node.target.root = null } } if (!node.isRoot) { this.root = node.root } treeCheck(this) } get inShrinkwrap () { return this.parent && (this.parent.hasShrinkwrap || this.parent.inShrinkwrap) } get parent () { // setter prevents _parent from being this return this[_parent] } // This setter keeps everything in order when we move a node from // one point in a logical tree to another. Edges get reloaded, // metadata updated, etc. It's also called when we *replace* a node // with another by the same name (eg, to update or dedupe). 
// This does a couple of walks out on the node_modules tree, recursing // into child nodes. However, as setting the parent is typically done // with nodes that don't have have many children, and (deduped) package // trees tend to be broad rather than deep, it's not that bad. // The only walk that starts from the parent rather than this node is // limited by edge name. set parent (parent) { // when setting to null, just remove it from the tree entirely if (!parent) { // but only delete it if we actually had a parent in the first place // otherwise it's just setting to null when it's already null if (this[_parent]) { this.root = null } return } if (parent.isLink) { parent = parent.target } // setting a thing to its own parent is not normal, but no-op for safety if (this === parent) { return } const oldParent = this[_parent] // nothing to do if (oldParent === parent) { return } // ok now we know something is actually changing, and parent is not a link const newPath = resolve(parent.path, 'node_modules', this.name) const pathChange = newPath !== this.path // remove from old parent/fsParent if (oldParent) { oldParent.children.delete(this.name) this[_parent] = null } if (this.fsParent) { this.fsParent.fsChildren.delete(this) this[_fsParent] = null } // update this.path/realpath for this and all children/fsChildren if (pathChange) { this[_changePath](newPath) } if (parent.overrides) { this.overrides = parent.overrides.getNodeRule(this) } // clobbers anything at that path, resets all appropriate references this.root = parent.root } // Call this before changing path or updating the _root reference. // Removes the node from its root the metadata and inventory. 
[_delistFromMeta] () { const root = this.root if (!root.realpath || !this.path) { return } root.inventory.delete(this) root.tops.delete(this) if (root.meta) { root.meta.delete(this.path) } /* istanbul ignore next - should be impossible */ debug(() => { if ([...root.inventory.values()].includes(this)) { throw new Error('failed to delist') } }) } // update this.path/realpath and the paths of all children/fsChildren [_changePath] (newPath) { // have to de-list before changing paths this[_delistFromMeta]() const oldPath = this.path this.path = newPath const namePattern = /(?:^|\/|\\)node_modules[\\/](@[^/\\]+[\\/][^\\/]+|[^\\/]+)$/ const nameChange = newPath.match(namePattern) if (nameChange && this.name !== nameChange[1]) { this.name = nameChange[1].replace(/\\/g, '/') } // if we move a link target, update link realpaths if (!this.isLink) { this.realpath = newPath for (const link of this.linksIn) { link[_delistFromMeta]() link.realpath = newPath link[_refreshLocation]() } } // if we move /x to /y, then a module at /x/a/b becomes /y/a/b for (const child of this.fsChildren) { child[_changePath](resolve(newPath, relative(oldPath, child.path))) } for (const [name, child] of this.children.entries()) { child[_changePath](resolve(newPath, 'node_modules', name)) } this[_refreshLocation]() } // Called whenever the root/parent is changed. // NB: need to remove from former root's meta/inventory and then update // this.path BEFORE calling this method! 
[_refreshLocation] () { const root = this.root const loc = relpath(root.realpath, this.path) this.location = loc root.inventory.add(this) if (root.meta) { root.meta.add(this) } } assertRootOverrides () { if (!this.isProjectRoot || !this.overrides) { return } for (const edge of this.edgesOut.values()) { // if these differ an override has been applied, those are not allowed // for top level dependencies so throw an error if (edge.spec !== edge.rawSpec && !edge.spec.startsWith('$')) { throw Object.assign(new Error(`Override for ${edge.name}@${edge.rawSpec} conflicts with direct dependency`), { code: 'EOVERRIDE' }) } } } addEdgeOut (edge) { if (this.overrides) { edge.overrides = this.overrides.getEdgeRule(edge) } this.edgesOut.set(edge.name, edge) } addEdgeIn (edge) { if (edge.overrides) { this.overrides = edge.overrides } this.edgesIn.add(edge) // try to get metadata from the yarn.lock file if (this.root.meta) { this.root.meta.addEdge(edge) } } [_reloadNamedEdges] (name, rootLoc = this.location) { const edge = this.edgesOut.get(name) // if we don't have an edge, do nothing, but keep descending const rootLocResolved = edge && edge.to && edge.to.location === `${rootLoc}/node_modules/${edge.name}` const sameResolved = edge && this.resolve(name) === edge.to const recheck = rootLocResolved || !sameResolved if (edge && recheck) { edge.reload(true) } for (const c of this.children.values()) { c[_reloadNamedEdges](name, rootLoc) } for (const c of this.fsChildren) { c[_reloadNamedEdges](name, rootLoc) } } get isLink () { return false } get target () { return this } set target (n) { debug(() => { throw Object.assign(new Error('cannot set target on non-Link Nodes'), { path: this.path, }) }) } get depth () { if (this.isTop) { return 0 } return this.parent.depth + 1 } get isTop () { return !this.parent || this.globalTop } get top () { if (this.isTop) { return this } return this.parent.top } get isFsTop () { return !this.fsParent } get fsTop () { if (this.isFsTop) { return this } 
return this.fsParent.fsTop } get resolveParent () { return this.parent || this.fsParent } resolve (name) { /* istanbul ignore next - should be impossible, * but I keep doing this mistake in tests */ debug(() => { if (typeof name !== 'string' || !name) { throw new Error('non-string passed to Node.resolve') } }) const mine = this.children.get(name) if (mine) { return mine } const resolveParent = this.resolveParent if (resolveParent) { return resolveParent.resolve(name) } return null } inNodeModules () { const rp = this.realpath const name = this.name const scoped = name.charAt(0) === '@' const d = dirname(rp) const nm = scoped ? dirname(d) : d const dir = dirname(nm) const base = scoped ? `${basename(d)}/${basename(rp)}` : basename(rp) return base === name && basename(nm) === 'node_modules' ? dir : false } // maybe accept both string value or array of strings // seems to be what dom API does querySelectorAll (query, opts) { return querySelectorAll(this, query, opts) } toJSON () { return printableTree(this) } [util.inspect.custom] () { return this.toJSON() } } module.exports = Node PK]�\|��arborist/lib/gather-dep-set.jsnu�[���// Given a set of nodes in a tree, and a filter function to test // incoming edges to the dep set that should be ignored otherwise. // // find the set of deps that are only depended upon by nodes in the set, or // their dependencies, or edges that are ignored. // // Used when figuring out what to prune when replacing a node with a newer // version, or when an optional dep fails to install. const gatherDepSet = (set, edgeFilter) => { const deps = new Set(set) // add the full set of dependencies. note that this loop will continue // as the deps set increases in size. 
for (const node of deps) { for (const edge of node.edgesOut.values()) { if (edge.to && edgeFilter(edge)) { deps.add(edge.to) } } } // now remove all nodes in the set that have a dependant outside the set // if any change is made, then re-check // continue until no changes made, or deps set evaporates fully. let changed = true while (changed === true && deps.size > 0) { changed = false for (const dep of deps) { for (const edge of dep.edgesIn) { if (!deps.has(edge.from) && edgeFilter(edge)) { changed = true deps.delete(dep) break } } } } return deps } module.exports = gatherDepSet PK]�\�}?�7�7arborist/lib/can-place-dep.jsnu�[���// Internal methods used by buildIdealTree. // Answer the question: "can I put this dep here?" // // IMPORTANT: *nothing* in this class should *ever* modify or mutate the tree // at all. The contract here is strictly limited to read operations. We call // this in the process of walking through the ideal tree checking many // different potential placement targets for a given node. If a change is made // to the tree along the way, that can cause serious problems! // // In order to enforce this restriction, in debug mode, canPlaceDep() will // snapshot the tree at the start of the process, and then at the end, will // verify that it still matches the snapshot, and throw an error if any changes // occurred. // // The algorithm is roughly like this: // - check the node itself: // - if there is no version present, and no conflicting edges from target, // OK, provided all peers can be placed at or above the target. // - if the current version matches, KEEP // - if there is an older version present, which can be replaced, then // - if satisfying and preferDedupe? 
KEEP // - else: REPLACE // - if there is a newer version present, and preferDedupe, REPLACE // - if the version present satisfies the edge, KEEP // - else: CONFLICT // - if the node is not in conflict, check each of its peers: // - if the peer can be placed in the target, continue // - else if the peer can be placed in a parent, and there is no other // conflicting version shadowing it, continue // - else CONFLICT // - If the peers are not in conflict, return the original node's value // // An exception to this logic is that if the target is the deepest location // that a node can be placed, and the conflicting node can be placed deeper, // then we will return REPLACE rather than CONFLICT, and Arborist will queue // the replaced node for resolution elsewhere. const localeCompare = require('@isaacs/string-locale-compare')('en') const semver = require('semver') const debug = require('./debug.js') const peerEntrySets = require('./peer-entry-sets.js') const deepestNestingTarget = require('./deepest-nesting-target.js') const CONFLICT = Symbol('CONFLICT') const OK = Symbol('OK') const REPLACE = Symbol('REPLACE') const KEEP = Symbol('KEEP') class CanPlaceDep { // dep is a dep that we're trying to place. it should already live in // a virtual tree where its peer set is loaded as children of the root. // target is the actual place where we're trying to place this dep // in a node_modules folder. // edge is the edge that we're trying to satisfy with this placement. // parent is the CanPlaceDep object of the entry node when placing a peer. 
constructor (options) { const { dep, target, edge, preferDedupe, parent = null, peerPath = [], explicitRequest = false, } = options debug(() => { if (!dep) { throw new Error('no dep provided to CanPlaceDep') } if (!target) { throw new Error('no target provided to CanPlaceDep') } if (!edge) { throw new Error('no edge provided to CanPlaceDep') } this._treeSnapshot = JSON.stringify([...target.root.inventory.entries()] .map(([loc, { packageName, version, resolved }]) => { return [loc, packageName, version, resolved] }).sort(([a], [b]) => localeCompare(a, b))) }) // the result of whether we can place it or not this.canPlace = null // if peers conflict, but this one doesn't, then that is useful info this.canPlaceSelf = null this.dep = dep this.target = target this.edge = edge this.explicitRequest = explicitRequest // preventing cycles when we check peer sets this.peerPath = peerPath // we always prefer to dedupe peers, because they are trying // a bit harder to be singletons. this.preferDedupe = !!preferDedupe || edge.peer this.parent = parent this.children = [] this.isSource = target === this.peerSetSource this.name = edge.name this.current = target.children.get(this.name) this.targetEdge = target.edgesOut.get(this.name) this.conflicts = new Map() // check if this dep was already subject to a peerDep override while // building the peerSet. 
this.edgeOverride = !dep.satisfies(edge) this.canPlace = this.checkCanPlace() if (!this.canPlaceSelf) { this.canPlaceSelf = this.canPlace } debug(() => { const treeSnapshot = JSON.stringify([...target.root.inventory.entries()] .map(([loc, { packageName, version, resolved }]) => { return [loc, packageName, version, resolved] }).sort(([a], [b]) => localeCompare(a, b))) /* istanbul ignore if */ if (this._treeSnapshot !== treeSnapshot) { throw Object.assign(new Error('tree changed in CanPlaceDep'), { expect: this._treeSnapshot, actual: treeSnapshot, }) } }) } checkCanPlace () { const { target, targetEdge, current, dep } = this // if the dep failed to load, we're going to fail the build or // prune it out anyway, so just move forward placing/replacing it. if (dep.errors.length) { return current ? REPLACE : OK } // cannot place peers inside their dependents, except for tops if (targetEdge && targetEdge.peer && !target.isTop) { return CONFLICT } // skip this test if there's a current node, because we might be able // to dedupe against it anyway if (!current && targetEdge && !dep.satisfies(targetEdge) && targetEdge !== this.edge) { return CONFLICT } return current ? this.checkCanPlaceCurrent() : this.checkCanPlaceNoCurrent() } // we know that the target has a dep by this name in its node_modules // already. Can return KEEP, REPLACE, or CONFLICT. checkCanPlaceCurrent () { const { preferDedupe, explicitRequest, current, target, edge, dep } = this if (dep.matches(current)) { if (current.satisfies(edge) || this.edgeOverride) { return explicitRequest ? REPLACE : KEEP } } const { version: curVer } = current const { version: newVer } = dep const tryReplace = curVer && newVer && semver.gte(newVer, curVer) if (tryReplace && dep.canReplace(current)) { // It's extremely rare that a replaceable node would be a conflict, if // the current one wasn't a conflict, but it is theoretically possible // if peer deps are pinned. 
In that case we treat it like any other // conflict, and keep trying. const cpp = this.canPlacePeers(REPLACE) if (cpp !== CONFLICT) { return cpp } } // ok, can't replace the current with new one, but maybe current is ok? if (current.satisfies(edge) && (!explicitRequest || preferDedupe)) { return KEEP } // if we prefer deduping, then try replacing newer with older if (preferDedupe && !tryReplace && dep.canReplace(current)) { const cpp = this.canPlacePeers(REPLACE) if (cpp !== CONFLICT) { return cpp } } // Check for interesting cases! // First, is this the deepest place that this thing can go, and NOT the // deepest place where the conflicting dep can go? If so, replace it, // and let it re-resolve deeper in the tree. const myDeepest = this.deepestNestingTarget // ok, i COULD be placed deeper, so leave the current one alone. if (target !== myDeepest) { return CONFLICT } // if we are not checking a peerDep, then we MUST place it here, in the // target that has a non-peer dep on it. if (!edge.peer && target === edge.from) { return this.canPlacePeers(REPLACE) } // if we aren't placing a peer in a set, then we're done here. // This is ignored because it SHOULD be redundant, as far as I can tell, // with the deepest target and target===edge.from tests. But until we // can prove that isn't possible, this condition is here for safety. /* istanbul ignore if - allegedly impossible */ if (!this.parent && !edge.peer) { return CONFLICT } // check the deps in the peer group for each edge into that peer group // if ALL of them can be pushed deeper, or if it's ok to replace its // members with the contents of the new peer group, then we're good. let canReplace = true for (const [entryEdge, currentPeers] of peerEntrySets(current)) { if (entryEdge === this.edge || entryEdge === this.peerEntryEdge) { continue } // First, see if it's ok to just replace the peerSet entirely. 
// we do this by walking out from the entryEdge, because in a case like // this: // // v -> PEER(a@1||2) // a@1 -> PEER(b@1) // a@2 -> PEER(b@2) // b@1 -> PEER(a@1) // b@2 -> PEER(a@2) // // root // +-- v // +-- a@2 // +-- b@2 // // Trying to place a peer group of (a@1, b@1) would fail to note that // they can be replaced, if we did it by looping 1 by 1. If we are // replacing something, we don't have to check its peer deps, because // the peerDeps in the placed peerSet will presumably satisfy. const entryNode = entryEdge.to const entryRep = dep.parent.children.get(entryNode.name) if (entryRep) { if (entryRep.canReplace(entryNode, dep.parent.children.keys())) { continue } } let canClobber = !entryRep if (!entryRep) { const peerReplacementWalk = new Set([entryNode]) OUTER: for (const currentPeer of peerReplacementWalk) { for (const edge of currentPeer.edgesOut.values()) { if (!edge.peer || !edge.valid) { continue } const rep = dep.parent.children.get(edge.name) if (!rep) { if (edge.to) { peerReplacementWalk.add(edge.to) } continue } if (!rep.satisfies(edge)) { canClobber = false break OUTER } } } } if (canClobber) { continue } // ok, we can't replace, but maybe we can nest the current set deeper? let canNestCurrent = true for (const currentPeer of currentPeers) { if (!canNestCurrent) { break } // still possible to nest this peerSet const curDeep = deepestNestingTarget(entryEdge.from, currentPeer.name) if (curDeep === target || target.isDescendantOf(curDeep)) { canNestCurrent = false canReplace = false } if (canNestCurrent) { continue } } } // if we can nest or replace all the current peer groups, we can replace. if (canReplace) { return this.canPlacePeers(REPLACE) } return CONFLICT } checkCanPlaceNoCurrent () { const { target, peerEntryEdge, dep, name } = this // check to see what that name resolves to here, and who may depend on // being able to reach it by crawling up past the parent. 
we know // that it's not the target's direct child node, and if it was a direct // dep of the target, we would have conflicted earlier. const current = target !== peerEntryEdge.from && target.resolve(name) if (current) { for (const edge of current.edgesIn.values()) { if (edge.from.isDescendantOf(target) && edge.valid) { if (!dep.satisfies(edge)) { return CONFLICT } } } } // no objections, so this is fine as long as peers are ok here. return this.canPlacePeers(OK) } get deepestNestingTarget () { const start = this.parent ? this.parent.deepestNestingTarget : this.edge.from return deepestNestingTarget(start, this.name) } get conflictChildren () { return this.allChildren.filter(c => c.canPlace === CONFLICT) } get allChildren () { const set = new Set(this.children) for (const child of set) { for (const grandchild of child.children) { set.add(grandchild) } } return [...set] } get top () { return this.parent ? this.parent.top : this } // check if peers can go here. returns state or CONFLICT canPlacePeers (state) { this.canPlaceSelf = state if (this._canPlacePeers) { return this._canPlacePeers } // TODO: represent peerPath in ERESOLVE error somehow? const peerPath = [...this.peerPath, this.dep] let sawConflict = false for (const peerEdge of this.dep.edgesOut.values()) { if (!peerEdge.peer || !peerEdge.to || peerPath.includes(peerEdge.to)) { continue } const peer = peerEdge.to // it may be the case that the *initial* dep can be nested, but a peer // of that dep needs to be placed shallower, because the target has // a peer dep on the peer as well. 
const target = deepestNestingTarget(this.target, peer.name) const cpp = new CanPlaceDep({ dep: peer, target, parent: this, edge: peerEdge, peerPath, // always place peers in preferDedupe mode preferDedupe: true, }) /* istanbul ignore next */ debug(() => { if (this.children.some(c => c.dep === cpp.dep)) { throw new Error('checking same dep repeatedly') } }) this.children.push(cpp) if (cpp.canPlace === CONFLICT) { sawConflict = true } } this._canPlacePeers = sawConflict ? CONFLICT : state return this._canPlacePeers } // what is the node that is causing this peerSet to be placed? get peerSetSource () { return this.parent ? this.parent.peerSetSource : this.edge.from } get peerEntryEdge () { return this.top.edge } static get CONFLICT () { return CONFLICT } static get OK () { return OK } static get REPLACE () { return REPLACE } static get KEEP () { return KEEP } get description () { const { canPlace } = this return canPlace && canPlace.description || /* istanbul ignore next - old node affordance */ canPlace } } module.exports = CanPlaceDep PK]�\�S��K&K&arborist/lib/diff.jsnu�[���// a tree representing the difference between two trees // A Diff node's parent is not necessarily the parent of // the node location it refers to, but rather the highest level // node that needs to be either changed or removed. // Thus, the root Diff node is the shallowest change required // for a given branch of the tree being mutated. 
const { depth } = require('treeverse') const { existsSync } = require('node:fs') const ssri = require('ssri') class Diff { constructor ({ actual, ideal, filterSet, shrinkwrapInflated }) { this.filterSet = filterSet this.shrinkwrapInflated = shrinkwrapInflated this.children = [] this.actual = actual this.ideal = ideal if (this.ideal) { this.resolved = this.ideal.resolved this.integrity = this.ideal.integrity } this.action = getAction(this) this.parent = null // the set of leaf nodes that we rake up to the top level this.leaves = [] // the set of nodes that don't change in this branch of the tree this.unchanged = [] // the set of nodes that will be removed in this branch of the tree this.removed = [] } static calculate ({ actual, ideal, filterNodes = [], shrinkwrapInflated = new Set(), }) { // if there's a filterNode, then: // - get the path from the root to the filterNode. The root or // root.target should have an edge either to the filterNode or // a link to the filterNode. If not, abort. Add the path to the // filterSet. // - Add set of Nodes depended on by the filterNode to filterSet. // - Anything outside of that set should be ignored by getChildren const filterSet = new Set() const extraneous = new Set() for (const filterNode of filterNodes) { const { root } = filterNode if (root !== ideal && root !== actual) { throw new Error('invalid filterNode: outside idealTree/actualTree') } const rootTarget = root.target const edge = [...rootTarget.edgesOut.values()].filter(e => { return e.to && (e.to === filterNode || e.to.target === filterNode) })[0] filterSet.add(root) filterSet.add(rootTarget) filterSet.add(ideal) filterSet.add(actual) if (edge && edge.to) { filterSet.add(edge.to) filterSet.add(edge.to.target) } filterSet.add(filterNode) depth({ tree: filterNode, visit: node => filterSet.add(node), getChildren: node => { node = node.target const loc = node.location const idealNode = ideal.inventory.get(loc) const ideals = !idealNode ? 
[] : [...idealNode.edgesOut.values()].filter(e => e.to).map(e => e.to) const actualNode = actual.inventory.get(loc) const actuals = !actualNode ? [] : [...actualNode.edgesOut.values()].filter(e => e.to).map(e => e.to) if (actualNode) { for (const child of actualNode.children.values()) { if (child.extraneous) { extraneous.add(child) } } } return ideals.concat(actuals) }, }) } for (const extra of extraneous) { filterSet.add(extra) } return depth({ tree: new Diff({ actual, ideal, filterSet, shrinkwrapInflated }), getChildren, leave, }) } } const getAction = ({ actual, ideal }) => { if (!ideal) { return 'REMOVE' } // bundled meta-deps are copied over to the ideal tree when we visit it, // so they'll appear to be missing here. There's no need to handle them // in the diff, though, because they'll be replaced at reify time anyway // Otherwise, add the missing node. if (!actual) { return ideal.inDepBundle ? null : 'ADD' } // always ignore the root node if (ideal.isRoot && actual.isRoot) { return null } // if the versions don't match, it's a change no matter what if (ideal.version !== actual.version) { return 'CHANGE' } const binsExist = ideal.binPaths.every((path) => existsSync(path)) // top nodes, links, and git deps won't have integrity, but do have resolved // if neither node has integrity, the bins exist, and either (a) neither // node has a resolved value or (b) they both do and match, then we can // leave this one alone since we already know the versions match due to // the condition above. The "neither has resolved" case (a) cannot be // treated as a 'mark CHANGE and refetch', because shrinkwraps, bundles, // and link deps may lack this information, and we don't want to try to // go to the registry for something that isn't there. 
const noIntegrity = !ideal.integrity && !actual.integrity const noResolved = !ideal.resolved && !actual.resolved const resolvedMatch = ideal.resolved && ideal.resolved === actual.resolved if (noIntegrity && binsExist && (resolvedMatch || noResolved)) { return null } // otherwise, verify that it's the same bits // note that if ideal has integrity, and resolved doesn't, we treat // that as a 'change', so that it gets re-fetched and locked down. const integrityMismatch = !ideal.integrity || !actual.integrity || !ssri.parse(ideal.integrity).match(actual.integrity) if (integrityMismatch || !binsExist) { return 'CHANGE' } return null } const allChildren = node => { if (!node) { return new Map() } // if the node is root, and also a link, then what we really // want is to traverse the target's children if (node.isRoot && node.isLink) { return allChildren(node.target) } const kids = new Map() for (const n of [node, ...node.fsChildren]) { for (const kid of n.children.values()) { kids.set(kid.path, kid) } } return kids } // functions for the walk options when we traverse the trees // to create the diff tree const getChildren = diff => { const children = [] const { actual, ideal, unchanged, removed, filterSet, shrinkwrapInflated, } = diff // Note: we DON'T diff fsChildren themselves, because they are either // included in the package contents, or part of some other project, and // will never appear in legacy shrinkwraps anyway. but we _do_ include the // child nodes of fsChildren, because those are nodes that we are typically // responsible for installing. const actualKids = allChildren(actual) const idealKids = allChildren(ideal) if (ideal && ideal.hasShrinkwrap && !shrinkwrapInflated.has(ideal)) { // Guaranteed to get a diff.leaves here, because we always // be called with a proper Diff object when ideal has a shrinkwrap // that has not been inflated. 
diff.leaves.push(diff) return children } const paths = new Set([...actualKids.keys(), ...idealKids.keys()]) for (const path of paths) { const actual = actualKids.get(path) const ideal = idealKids.get(path) diffNode({ actual, ideal, children, unchanged, removed, filterSet, shrinkwrapInflated, }) } if (diff.leaves && !children.length) { diff.leaves.push(diff) } return children } const diffNode = ({ actual, ideal, children, unchanged, removed, filterSet, shrinkwrapInflated, }) => { if (filterSet.size && !(filterSet.has(ideal) || filterSet.has(actual))) { return } const action = getAction({ actual, ideal }) // if it's a match, then get its children // otherwise, this is the child diff node if (action || (!shrinkwrapInflated.has(ideal) && ideal.hasShrinkwrap)) { if (action === 'REMOVE') { removed.push(actual) } children.push(new Diff({ actual, ideal, filterSet, shrinkwrapInflated })) } else { unchanged.push(ideal) // !*! Weird dirty hack warning !*! // // Bundled deps aren't loaded in the ideal tree, because we don't know // what they are going to be without unpacking. Swap them over now if // the bundling node isn't changing, so we don't prune them later. // // It's a little bit dirty to be doing this here, since it means that // diffing trees can mutate them, but otherwise we have to walk over // all unchanging bundlers and correct the diff later, so it's more // efficient to just fix it while we're passing through already. // // Note that moving over a bundled dep will break the links to other // deps under this parent, which may have been transitively bundled. // Breaking those links means that we'll no longer see the transitive // dependency, meaning that it won't appear as bundled any longer! // In order to not end up dropping transitively bundled deps, we have // to get the list of nodes to move, then move them all at once, rather // than moving them one at a time in the first loop. 
const bd = ideal.package.bundleDependencies if (actual && bd && bd.length) { const bundledChildren = [] for (const node of actual.children.values()) { if (node.inBundle) { bundledChildren.push(node) } } for (const node of bundledChildren) { node.parent = ideal } } children.push(...getChildren({ actual, ideal, unchanged, removed, filterSet, shrinkwrapInflated, })) } } // set the parentage in the leave step so that we aren't attaching // child nodes only to remove them later. also bubble up the unchanged // nodes so that we can move them out of staging in the reification step. const leave = (diff, children) => { children.forEach(kid => { kid.parent = diff diff.leaves.push(...kid.leaves) diff.unchanged.push(...kid.unchanged) diff.removed.push(...kid.removed) }) diff.children = children return diff } module.exports = Diff PK]�\RF���arborist/lib/signal-handling.jsnu�[���const signals = require('./signals.js') // for testing, expose the process being used module.exports = Object.assign(fn => setup(fn), { process }) // do all of this in a setup function so that we can call it // multiple times for multiple reifies that might be going on. // Otherwise, Arborist.reify() is a global action, which is a // new constraint we'd be adding with this behavior. const setup = fn => { const { process } = module.exports const sigListeners = { loaded: false } const unload = () => { if (!sigListeners.loaded) { return } for (const sig of signals) { try { process.removeListener(sig, sigListeners[sig]) } catch { // ignore errors } } process.removeListener('beforeExit', onBeforeExit) sigListeners.loaded = false } const onBeforeExit = () => { // this trick ensures that we exit with the same signal we caught // Ie, if you press ^C and npm gets a SIGINT, we'll do the rollback // and then exit with a SIGINT signal once we've removed the handler. 
// The timeout is there because signals are asynchronous, so we need // the process to NOT exit on its own, which means we have to have // something keeping the event loop looping. Hence this hack. unload() process.kill(process.pid, signalReceived) setTimeout(() => {}, 500) } let signalReceived = null const listener = (sig, fn) => () => { signalReceived = sig // if we exit normally, but caught a signal which would have been fatal, // then re-send it once we're done with whatever cleanup we have to do. unload() if (process.listeners(sig).length < 1) { process.once('beforeExit', onBeforeExit) } fn({ signal: sig }) } // do the actual loading here for (const sig of signals) { sigListeners[sig] = listener(sig, fn) const max = process.getMaxListeners() try { // if we call this a bunch of times, avoid triggering the warning const { length } = process.listeners(sig) if (length >= max) { process.setMaxListeners(length + 1) } process.on(sig, sigListeners[sig]) } catch { // ignore errors } } sigListeners.loaded = true return unload } PK]�\�~�llarborist/bin/virtual.jsnu�[���const Arborist = require('../') const printTree = require('./lib/print-tree.js') module.exports = (options, time) => new Arborist(options) .loadVirtual() .then(time) .then(async ({ timing, result: tree }) => { printTree(tree) if (options.save) { await tree.meta.save() } return `read ${tree.inventory.size} deps in ${timing.ms}` }) PK]�\�t���arborist/bin/lib/logging.jsnu�[���const { log } = require('proc-log') const fs = require('node:fs') const { dirname } = require('node:path') const os = require('node:os') const { inspect, format } = require('node:util') const { bin: options } = require('./options.js') // add a meta method to proc-log for passing optional // metadata through to log handlers const META = Symbol('meta') const parseArgs = (...args) => { const { [META]: isMeta } = args[args.length - 1] || {} return isMeta ? 
[args[args.length - 1], ...args.slice(0, args.length - 1)] : [{}, ...args] } log.meta = (meta = {}) => ({ [META]: true, ...meta }) const levels = new Map([ 'silly', 'verbose', 'info', 'http', 'notice', 'warn', 'error', 'silent', ].map((level, index) => [level, index])) const addLogListener = (write, { eol = os.EOL, loglevel = 'silly', colors = false } = {}) => { const levelIndex = levels.get(loglevel) const magenta = m => colors ? `\x1B[35m${m}\x1B[39m` : m const dim = m => colors ? `\x1B[2m${m}\x1B[22m` : m const red = m => colors ? `\x1B[31m${m}\x1B[39m` : m const formatter = (level, ...args) => { const depth = level === 'error' && args[0] && args[0].code === 'ERESOLVE' ? Infinity : 10 if (level === 'info' && args[0] === 'timeEnd') { args[1] = dim(args[1]) } else if (level === 'error' && args[0] === 'timeError') { args[1] = red(args[1]) } const messages = args.map(a => typeof a === 'string' ? a : inspect(a, { depth, colors })) const pref = `${process.pid} ${magenta(level)} ` return pref + format(...messages).trim().split('\n').join(`${eol}${pref}`) + eol } process.on('log', (...args) => { const [meta, level, ...logArgs] = parseArgs(...args) if (levelIndex <= levels.get(level) || meta.force) { write(formatter(level, ...logArgs)) } }) } if (options.loglevel !== 'silent') { addLogListener((v) => process.stderr.write(v), { eol: '\n', colors: options.colors, loglevel: options.loglevel, }) } if (options.logfile) { log.silly('logfile', options.logfile) fs.mkdirSync(dirname(options.logfile), { recursive: true }) const fd = fs.openSync(options.logfile, 'a') addLogListener((str) => fs.writeSync(fd, str)) } module.exports = log PK]�\{��ʝ�arborist/bin/lib/print-tree.jsnu�[���const { inspect } = require('node:util') const log = require('./logging.js') module.exports = tree => log.info(inspect(tree.toJSON(), { depth: Infinity })) PK]�\=��arborist/bin/lib/timers.jsnu�[���const { bin: options } = require('./options.js') const log = require('./logging.js') const timers = new 
Map() const finished = new Map() process.on('time', (level, name) => { if (level === 'start') { if (timers.has(name)) { throw new Error('conflicting timer! ' + name) } timers.set(name, process.hrtime.bigint()) } else if (level === 'end') { if (!timers.has(name)) { throw new Error('timer not started! ' + name) } const elapsed = Number(process.hrtime.bigint() - timers.get(name)) timers.delete(name) finished.set(name, elapsed) if (options.timing) { log.info('timeEnd', `${name} ${elapsed / 1e9}s`, log.meta({ force: options.timing === 'always' })) } } }) process.on('exit', () => { for (const name of timers.keys()) { log.error('timeError', 'Dangling timer:', name) process.exitCode = 1 } }) module.exports = finished PK]�\ _���arborist/bin/lib/options.jsnu�[���const nopt = require('nopt') const path = require('node:path') const has = (o, k) => Object.prototype.hasOwnProperty.call(o, k) const cleanPath = (val) => { const k = Symbol('key') const data = {} nopt.typeDefs.path.validate(data, k, val) return data[k] } const parse = (...noptArgs) => { const binOnlyOpts = { command: String, loglevel: String, colors: Boolean, timing: ['always', Boolean], logfile: String, } const arbOpts = { add: Array, rm: Array, omit: Array, update: Array, workspaces: Array, global: Boolean, force: Boolean, 'global-style': Boolean, 'prefer-dedupe': Boolean, 'legacy-peer-deps': Boolean, 'update-all': Boolean, before: Date, path: path, cache: path, ...binOnlyOpts, } const short = { quiet: ['--loglevel', 'warn'], logs: ['--logfile', 'true'], w: '--workspaces', g: '--global', f: '--force', } const defaults = { // key order is important for command and path // since they shift positional args // command is 1st, path is 2nd command: (o) => o.argv.remain.shift(), path: (o) => cleanPath(o.argv.remain.shift() || '.'), colors: has(process.env, 'NO_COLOR') ? 
false : !!process.stderr.isTTY, loglevel: 'silly', timing: (o) => o.loglevel === 'silly', cache: `${process.env.HOME}/.npm/_cacache`, } const derived = [ // making update either `all` or an array of names but not both ({ updateAll: all, update: names, ...o }) => { if (all || names) { o.update = all != null ? { all } : { names } } return o }, ({ logfile, ...o }) => { // logfile is parsed as a string so if its true or set but empty // then set the default logfile if (logfile === 'true' || logfile === '') { logfile = `arb-log-${new Date().toISOString().replace(/[.:]/g, '_')}.log` } // then parse it the same as nopt parses other paths if (logfile) { o.logfile = cleanPath(logfile) } return o }, ] const transforms = [ // Camelcase all top level keys (o) => { const entries = Object.entries(o).map(([k, v]) => [ k.replace(/-./g, s => s[1].toUpperCase()), v, ]) return Object.fromEntries(entries) }, // Set defaults on unset keys (o) => { for (const [k, v] of Object.entries(defaults)) { if (!has(o, k)) { o[k] = typeof v === 'function' ? 
v(o) : v } } return o }, // Set/unset derived values ...derived.map((derive) => (o) => derive(o) || o), // Separate bin and arborist options ({ argv: { remain: _ }, ...o }) => { const bin = { _ } for (const k of Object.keys(binOnlyOpts)) { if (has(o, k)) { bin[k] = o[k] delete o[k] } } return { bin, arb: o } }, ] let options = nopt(arbOpts, short, ...noptArgs) for (const t of transforms) { options = t(options) } return options } module.exports = parse() PK]�\|[�vvarborist/bin/funding.jsnu�[���const Arborist = require('../') const log = require('./lib/logging.js') module.exports = (options, time) => { const query = options._.shift() const a = new Arborist(options) return a .loadVirtual() .then(tree => { // only load the actual tree if the virtual one doesn't have modern metadata if (!tree.meta || !(tree.meta.originalLockfileVersion >= 2)) { log.error('old metadata, load actual') throw 'load actual' } else { log.error('meta ok, return virtual tree') return tree } }) .catch(() => a.loadActual()) .then(time) .then(({ timing, result: tree }) => { if (!query) { for (const node of tree.inventory.values()) { if (node.package.funding) { log.info(node.name, node.location, node.package.funding) } } } else { for (const node of tree.inventory.query('name', query)) { if (node.package.funding) { log.info(node.name, node.location, node.package.funding) } } } return `read ${tree.inventory.size} deps in ${timing.ms}` }) } PK]�\�no� arborist/bin/index.jsnu�[���#!/usr/bin/env node const fs = require('node:fs') const path = require('node:path') const { time } = require('proc-log') const { bin, arb: options } = require('./lib/options') const version = require('../package.json').version const usage = (message = '') => `Arborist - the npm tree doctor Version: ${version} ${message && '\n' + message + '\n'} # USAGE arborist <cmd> [path] [options...] # COMMANDS * reify: reify ideal tree to node_modules (install, update, rm, ...) 
* prune: prune the ideal tree and reify (like npm prune) * ideal: generate and print the ideal tree * actual: read and print the actual tree in node_modules * virtual: read and print the virtual tree in the local shrinkwrap file * shrinkwrap: load a local shrinkwrap and print its data * audit: perform a security audit on project dependencies * funding: query funding information in the local package tree. A second positional argument after the path name can limit to a package name. * license: query license information in the local package tree. A second positional argument after the path name can limit to a license type. * help: print this text * version: print the version # OPTIONS Most npm options are supported, but in camelCase rather than css-case. For example, instead of '--dry-run', use '--dryRun'. Additionally: * --loglevel=warn|--quiet will supppress the printing of package trees * --logfile <file|bool> will output logs to a file * --timing will show timing information * Instead of 'npm install <pkg>', use 'arborist reify --add=<pkg>'. The '--add=<pkg>' option can be specified multiple times. * Instead of 'npm rm <pkg>', use 'arborist reify --rm=<pkg>'. The '--rm=<pkg>' option can be specified multiple times. * Instead of 'npm update', use 'arborist reify --update-all'. 
* 'npm audit fix' is 'arborist audit --fix' ` const commands = { version: () => console.log(version), help: () => console.log(usage()), exit: () => { process.exitCode = 1 console.error( usage(`Error: command '${bin.command}' does not exist.`) ) }, } const commandFiles = fs.readdirSync(__dirname).filter((f) => path.extname(f) === '.js' && f !== __filename) for (const file of commandFiles) { const command = require(`./${file}`) const name = path.basename(file, '.js') const totalTime = `bin:${name}:init` const scriptTime = `bin:${name}:script` commands[name] = () => { const timers = require('./lib/timers') const log = require('./lib/logging') log.info(name, options) const timeEnd = time.start(totalTime) const scriptEnd = time.start(scriptTime) return command(options, (result) => { scriptEnd() return { result, timing: { seconds: `${timers.get(scriptTime) / 1e9}s`, ms: `${timers.get(scriptTime) / 1e6}ms`, }, } }) .then((result) => { log.info(result) return result }) .catch((err) => { process.exitCode = 1 log.error(err) return err }) .then((r) => { timeEnd() if (bin.loglevel !== 'silent') { console[process.exitCode ? 'error' : 'log'](r) } return r }) } } if (commands[bin.command]) { commands[bin.command]() } else { commands.exit() } PK]�\�w'�wwarborist/bin/audit.jsnu�[���const Arborist = require('../') const printTree = require('./lib/print-tree.js') const log = require('./lib/logging.js') const Vuln = require('../lib/vuln.js') const printReport = report => { for (const vuln of report.values()) { log.info(printVuln(vuln)) } if (report.topVulns.size) { log.info('\n# top-level vulnerabilities') for (const vuln of report.topVulns.values()) { log.info(printVuln(vuln)) } } } const printVuln = vuln => { return { __proto__: { constructor: Vuln }, name: vuln.name, issues: [...vuln.advisories].map(a => printAdvisory(a)), range: vuln.simpleRange, nodes: [...vuln.nodes].map(node => `${node.name} ${node.location || '#ROOT'}`), ...(vuln.topNodes.size === 0 ? 
{} : { topNodes: [...vuln.topNodes].map(node => `${node.location || '#ROOT'}`), }), } } const printAdvisory = a => `${a.title}${a.url ? ' ' + a.url : ''}` module.exports = (options, time) => { const arb = new Arborist(options) return arb .audit(options) .then(time) .then(async ({ timing, result: tree }) => { if (options.fix) { printTree(tree) } printReport(arb.auditReport) if (tree.meta && options.save) { await tree.meta.save() } return options.fix ? `resolved ${tree.inventory.size} deps in ${timing.seconds}` : `done in ${timing.seconds}` }) } PK]�\4N\N||arborist/bin/prune.jsnu�[���const Arborist = require('../') const printTree = require('./lib/print-tree.js') const log = require('./lib/logging.js') const printDiff = diff => { const { depth } = require('treeverse') depth({ tree: diff, visit: d => { if (d.location === '') { return } switch (d.action) { case 'REMOVE': log.info('REMOVE', d.actual.location) break case 'ADD': log.info('ADD', d.ideal.location, d.ideal.resolved) break case 'CHANGE': log.info('CHANGE', d.actual.location, { from: d.actual.resolved, to: d.ideal.resolved, }) break } }, getChildren: d => d.children, }) } module.exports = (options, time) => { const arb = new Arborist(options) return arb .prune(options) .then(time) .then(async ({ timing, result: tree }) => { printTree(tree) if (options.dryRun) { printDiff(arb.diff) } if (tree.meta && options.save) { await tree.meta.save() } return `resolved ${tree.inventory.size} deps in ${timing.seconds}` }) } PK]�\n# �||arborist/bin/reify.jsnu�[���const Arborist = require('../') const printTree = require('./lib/print-tree.js') const log = require('./lib/logging.js') const printDiff = diff => { const { depth } = require('treeverse') depth({ tree: diff, visit: d => { if (d.location === '') { return } switch (d.action) { case 'REMOVE': log.info('REMOVE', d.actual.location) break case 'ADD': log.info('ADD', d.ideal.location, d.ideal.resolved) break case 'CHANGE': log.info('CHANGE', d.actual.location, { from: 
d.actual.resolved, to: d.ideal.resolved, }) break } }, getChildren: d => d.children, }) } module.exports = (options, time) => { const arb = new Arborist(options) return arb .reify(options) .then(time) .then(async ({ timing, result: tree }) => { printTree(tree) if (options.dryRun) { printDiff(arb.diff) } if (tree.meta && options.save) { await tree.meta.save() } return `resolved ${tree.inventory.size} deps in ${timing.seconds}` }) } PK]�\�р���arborist/bin/license.jsnu�[���const localeCompare = require('@isaacs/string-locale-compare')('en') const Arborist = require('../') const log = require('./lib/logging.js') module.exports = (options, time) => { const query = options._.shift() const a = new Arborist(options) return a .loadVirtual() .then(tree => { // only load the actual tree if the virtual one doesn't have modern metadata if (!tree.meta || !(tree.meta.originalLockfileVersion >= 2)) { throw 'load actual' } else { return tree } }).catch((er) => { log.error('loading actual tree', er) return a.loadActual() }) .then(time) .then(({ result: tree }) => { const output = [] if (!query) { const set = [] for (const license of tree.inventory.query('license')) { set.push([tree.inventory.query('license', license).size, license]) } for (const [count, license] of set.sort((a, b) => a[1] && b[1] ? b[0] - a[0] || localeCompare(a[1], b[1]) : a[1] ? -1 : b[1] ? 1 : 0)) { output.push(`${count} ${license}`) log.info(count, license) } } else { for (const node of tree.inventory.query('license', query === 'undefined' ? 
undefined : query)) { const msg = `${node.name} ${node.location} ${node.package.description || ''}` output.push(msg) log.info(msg) } } return output.join('\n') }) } PK]�\3����arborist/bin/shrinkwrap.jsnu�[���const Shrinkwrap = require('../lib/shrinkwrap.js') module.exports = (options, time) => Shrinkwrap .load(options) .then((s) => s.commit()) .then(time) .then(({ result: s }) => JSON.stringify(s, 0, 2)) PK]�\�K�&&arborist/bin/actual.jsnu�[���const Arborist = require('../') const printTree = require('./lib/print-tree.js') module.exports = (options, time) => new Arborist(options) .loadActual(options) .then(time) .then(async ({ timing, result: tree }) => { printTree(tree) if (options.save) { await tree.meta.save() } if (options.saveHidden) { tree.meta.hiddenLockfile = true tree.meta.filename = options.path + '/node_modules/.package-lock.json' await tree.meta.save() } return `read ${tree.inventory.size} deps in ${timing.ms}` }) PK]�\`6fq��arborist/bin/ideal.jsnu�[���const Arborist = require('../') const printTree = require('./lib/print-tree.js') module.exports = (options, time) => new Arborist(options) .buildIdealTree(options) .then(time) .then(async ({ timing, result: tree }) => { printTree(tree) if (tree.meta && options.save) { await tree.meta.save() } return `resolved ${tree.inventory.size} deps in ${timing.seconds}` }) PK]�\��d7�E�Earborist/README.mdnu�[���# @npmcli/arborist [](https://npm.im/@npmcli/arborist) [](https://npm.im/@npmcli/arborist) [](https://github.com/npm/cli/actions/workflows/ci-npmcli-arborist.yml) Inspect and manage `node_modules` trees.  There's more documentation [in the docs folder](https://github.com/npm/cli/tree/latest/workspaces/arborist/docs). ## USAGE ```js const Arborist = require('@npmcli/arborist') const arb = new Arborist({ // options object // where we're doing stuff. defaults to cwd. path: '/path/to/package/root', // url to the default registry. 
defaults to npm's default registry registry: 'https://registry.npmjs.org', // scopes can be mapped to a different registry '@foo:registry': 'https://registry.foo.com/', // Auth can be provided in a couple of different ways. If none are // provided, then requests are anonymous, and private packages will 404. // Arborist doesn't do anything with these, it just passes them down // the chain to pacote and npm-registry-fetch. // Safest: a bearer token provided by a registry: // 1. an npm auth token, used with the default registry token: 'deadbeefcafebad', // 2. an alias for the same thing: _authToken: 'deadbeefcafebad', // insecure options: // 3. basic auth, username:password, base64 encoded auth: 'aXNhYWNzOm5vdCBteSByZWFsIHBhc3N3b3Jk', // 4. username and base64 encoded password username: 'isaacs', password: 'bm90IG15IHJlYWwgcGFzc3dvcmQ=', // auth configs can also be scoped to a given registry with this // rather unusual pattern: '//registry.foo.com:token': 'blahblahblah', '//basic.auth.only.foo.com:_auth': 'aXNhYWNzOm5vdCBteSByZWFsIHBhc3N3b3Jk', '//registry.foo.com:always-auth': true, }) // READING // returns a promise. reads the actual contents of node_modules arb.loadActual().then(tree => { // tree is also stored at arb.virtualTree }) // read just what the package-lock.json/npm-shrinkwrap says // This *also* loads the yarn.lock file, but that's only relevant // when building the ideal tree. arb.loadVirtual().then(tree => { // tree is also stored at arb.virtualTree // now arb.virtualTree is loaded // this fails if there's no package-lock.json or package.json in the folder // note that loading this way should only be done if there's no // node_modules folder }) // OPTIMIZING AND DESIGNING // build an ideal tree from the package.json and various lockfiles. arb.buildIdealTree(options).then(() => { // next step is to reify that ideal tree onto disk. 
// options can be: // rm: array of package names to remove at top level // add: Array of package specifiers to add at the top level. Each of // these will be resolved with pacote.manifest if the name can't be // determined from the spec. (Eg, `github:foo/bar` vs `foo@somespec`.) // The dep will be saved in the location where it already exists, // (or pkg.dependencies) unless a different saveType is specified. // saveType: Save added packages in a specific dependency set. // - null (default) Wherever they exist already, or 'dependencies' // - prod: definitely in 'dependencies' // - optional: in 'optionalDependencies' // - dev: devDependencies // - peer: save in peerDependencies, and remove any optional flag from // peerDependenciesMeta if one exists // - peerOptional: save in peerDependencies, and add a // peerDepsMeta[name].optional flag // saveBundle: add newly added deps to the bundleDependencies list // update: Either `true` to just go ahead and update everything, or an // object with any or all of the following fields: // - all: boolean. set to true to just update everything // - names: names of packages update (like `npm update foo`) // prune: boolean, default true. Prune extraneous nodes from the tree. // preferDedupe: prefer to deduplicate packages if possible, rather than // choosing a newer version of a dependency. Defaults to false, ie, // always try to get the latest and greatest deps. // legacyBundling: Nest every dep under the node requiring it, npm v2 style. // No unnecessary deduplication. Default false. // At the end of this process, arb.idealTree is set. }) // WRITING // Make the idealTree be the thing that's on disk arb.reify({ // write the lockfile(s) back to disk, and package.json with any updates // defaults to 'true' save: true, }).then(() => { // node modules has been written to match the idealTree }) ``` ## DATA STRUCTURES A `node_modules` tree is a logical graph of dependencies overlaid on a physical tree of folders. 
A `Node` represents a package folder on disk, either at the root of the package, or within a `node_modules` folder. The physical structure of the folder tree is represented by the `node.parent` reference to the containing folder, and `node.children` map of nodes within its `node_modules` folder, where the key in the map is the name of the folder in `node_modules`, and the value is the child node. A node without a parent is a top of tree. A `Link` represents a symbolic link to a package on disk. This can be a symbolic link to a package folder within the current tree, or elsewhere on disk. The `link.target` is a reference to the actual node. Links differ from Nodes in that dependencies are resolved from the _target_ location, rather than from the link location. An `Edge` represents a dependency relationship. Each node has an `edgesIn` set, and an `edgesOut` map. Each edge has a `type` which specifies what kind of dependency it represents: `'prod'` for regular dependencies, `'peer'` for peerDependencies, `'dev'` for devDependencies, and `'optional'` for optionalDependencies. `edge.from` is a reference to the node that has the dependency, and `edge.to` is a reference to the node that requires the dependency. As nodes are moved around in the tree, the graph edges are automatically updated to point at the new module resolution targets. In other words, `edge.from`, `edge.name`, and `edge.spec` are immutable; `edge.to` is updated automatically when a node's parent changes. ### class Node All arborist trees are `Node` objects. A `Node` refers to a package folder, which may have children in `node_modules`. * `node.name` The name of this node's folder in `node_modules`. * `node.parent` Physical parent node in the tree. The package in whose `node_modules` folder this package lives. Null if node is top of tree. Setting `node.parent` will automatically update `node.location` and all graph edges affected by the move. 
* `node.meta` A `Shrinkwrap` object which looks up `resolved` and `integrity` values for all modules in this tree. Only relevant on `root` nodes. * `node.children` Map of packages located in the node's `node_modules` folder. * `node.package` The contents of this node's `package.json` file. * `node.path` File path to this package. If the node is a link, then this is the path to the link, not to the link target. If the node is _not_ a link, then this matches `node.realpath`. * `node.realpath` The full real filepath on disk where this node lives. * `node.location` A slash-normalized relative path from the root node to this node's path. * `node.isLink` Whether this represents a symlink. Always `false` for Node objects, always `true` for Link objects. * `node.isRoot` True if this node is a root node. (Ie, if `node.root === node`.) * `node.root` The root node where we are working. If not assigned to some other value, resolves to the node itself. (Ie, the root node's `root` property refers to itself.) * `node.isTop` True if this node is the top of its tree (ie, has no `parent`, false otherwise). * `node.top` The top node in this node's tree. This will be equal to `node.root` for simple trees, but link targets will frequently be outside of (or nested somewhere within) a `node_modules` hierarchy, and so will have a different `top`. * `node.dev`, `node.optional`, `node.devOptional`, `node.peer`, Indicators as to whether this node is a dev, optional, and/or peer dependency. These flags are relevant when pruning dependencies out of the tree or deciding what to reify. See **Package Dependency Flags** below for explanations. * `node.edgesOut` Edges in the dependency graph indicating nodes that this node depends on, which resolve its dependencies. * `node.edgesIn` Edges in the dependency graph indicating nodes that depend on this node. * `extraneous` True if this package is not required by any other for any reason. False for top of tree. 
* `node.resolve(name)` Identify the node that will be returned when code in this package runs `require(name)` * `node.errors` Array of errors encountered while parsing package.json or version specifiers. ### class Link Link objects represent a symbolic link within the `node_modules` folder. They have most of the same properties and methods as `Node` objects, with a few differences. * `link.target` A Node object representing the package that the link references. If this is a Node already present within the tree, then it will be the same object. If it's outside of the tree, then it will be treated as the top of its own tree. * `link.isLink` Always true. * `link.children` This is always an empty map, since links don't have their own children directly. ### class Edge Edge objects represent a dependency relationship a package node to the point in the tree where the dependency will be loaded. As nodes are moved within the tree, Edges automatically update to point to the appropriate location. * `new Edge({ from, type, name, spec })` Creates a new edge with the specified fields. After instantiation, none of the fields can be changed directly. * `edge.from` The node that has the dependency. * `edge.type` The type of dependency. One of `'prod'`, `'dev'`, `'peer'`, or `'optional'`. * `edge.name` The name of the dependency. Ie, the key in the relevant `package.json` dependencies object. * `edge.spec` The specifier that is required. This can be a version, range, tag name, git url, or tarball URL. Any specifier allowed by npm is supported. * `edge.to` Automatically set to the node in the tree that matches the `name` field. * `edge.valid` True if `edge.to` satisfies the specifier. * `edge.error` A string indicating the type of error if there is a problem, or `null` if it's valid. Values, in order of precedence: * `DETACHED` Indicates that the edge has been detached from its `edge.from` node, typically because a new edge was created when a dependency specifier was modified. 
* `MISSING` Indicates that the dependency is unmet. Note that this is _not_ set for unmet dependencies of the `optional` type. * `PEER LOCAL` Indicates that a `peerDependency` is found in the node's local `node_modules` folder, and the node is not the top of the tree. This violates the `peerDependency` contract, because it means that the dependency is not a peer. * `INVALID` Indicates that the dependency does not satisfy `edge.spec`. * `edge.reload()` Re-resolve to find the appropriate value for `edge.to`. Called automatically from the `Node` class when the tree is mutated. ### Package Dependency Flags The dependency type of a node can be determined efficiently by looking at the `dev`, `optional`, and `devOptional` flags on the node object. These are updated by arborist when necessary whenever the tree is modified in such a way that the dependency graph can change, and are relevant when pruning nodes from the tree. ``` | extraneous | peer | dev | optional | devOptional | meaning | prune? 
| |------------+------+-----+----------+-------------+---------------------+-------------------| | | | | | | production dep | never | |------------+------+-----+----------+-------------+---------------------+-------------------| | X | N/A | N/A | N/A | N/A | nothing depends on | always | | | | | | | this, it is trash | | |------------+------+-----+----------+-------------+---------------------+-------------------| | | | X | | X | devDependency, or | if pruning dev | | | | | | not in lock | only depended upon | | | | | | | | by devDependencies | | |------------+------+-----+----------+-------------+---------------------+-------------------| | | | | X | X | optionalDependency, | if pruning | | | | | | not in lock | or only depended on | optional | | | | | | | by optionalDeps | | |------------+------+-----+----------+-------------+---------------------+-------------------| | | | X | X | X | Optional dependency | if pruning EITHER | | | | | | not in lock | of dep(s) in the | dev OR optional | | | | | | | dev hierarchy | | |------------+------+-----+----------+-------------+---------------------+-------------------| | | | | | X | BOTH a non-optional | if pruning BOTH | | | | | | in lock | dep within the dev | dev AND optional | | | | | | | hierarchy, AND a | | | | | | | | dep within the | | | | | | | | optional hierarchy | | |------------+------+-----+----------+-------------+---------------------+-------------------| | | X | | | | peer dependency, or | if pruning peers | | | | | | | only depended on by | | | | | | | | peer dependencies | | |------------+------+-----+----------+-------------+---------------------+-------------------| | | X | X | | X | peer dependency of | if pruning peer | | | | | | not in lock | dev node hierarchy | OR dev deps | |------------+------+-----+----------+-------------+---------------------+-------------------| | | X | | X | X | peer dependency of | if pruning peer | | | | | | not in lock | optional nodes, or | OR optional deps | | | | | | 
| peerOptional dep | | |------------+------+-----+----------+-------------+---------------------+-------------------| | | X | X | X | X | peer optional deps | if pruning peer | | | | | | not in lock | of the dev dep | OR optional OR | | | | | | | hierarchy | dev | |------------+------+-----+----------+-------------+---------------------+-------------------| | | X | | | X | BOTH a non-optional | if pruning peers | | | | | | in lock | peer dep within the | OR: | | | | | | | dev hierarchy, AND | BOTH optional | | | | | | | a peer optional dep | AND dev deps | +------------+------+-----+----------+-------------+---------------------+-------------------+ ``` * If none of these flags are set, then the node is required by the dependency and/or peerDependency hierarchy. It should not be pruned. * If _both_ `node.dev` and `node.optional` are set, then the node is an optional dependency of one of the packages in the devDependency hierarchy. It should be pruned if _either_ dev or optional deps are being removed. * If `node.dev` is set, but `node.optional` is not, then the node is required in the devDependency hierarchy. It should be pruned if dev dependencies are being removed. * If `node.optional` is set, but `node.dev` is not, then the node is required in the optionalDependency hierarchy. It should be pruned if optional dependencies are being removed. * If `node.devOptional` is set, then the node is a (non-optional) dependency within the devDependency hierarchy, _and_ a dependency within the `optionalDependency` hierarchy. It should be pruned if _both_ dev and optional dependencies are being removed. * If `node.peer` is set, then all the same semantics apply as above, except that the dep is brought in by a peer dep at some point, rather than a normal non-peer dependency. Note: `devOptional` is only set in the shrinkwrap/package-lock file if _neither_ `dev` nor `optional` are set, as it would be redundant. 
## BIN Arborist ships with a cli that can be used to run arborist specific commands outside of the context of the npm CLI. This script is currently not part of the public API and is subject to breaking changes outside of major version bumps. To see the usage run: ``` npx @npmcli/arborist --help ``` PK]�\�r��arborist/LICENSE.mdnu�[���<!-- This file is automatically added by @npmcli/template-oss. Do not edit. --> ISC License Copyright npm, Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. PK]�\Q~ﮝ�agent/package.jsonnu�[���{ "_id": "@npmcli/agent@2.2.2", "_inBundle": true, "_location": "/npm/@npmcli/agent", "_phantomChildren": {}, "_requiredBy": [ "/npm/make-fetch-happen" ], "author": { "name": "GitHub Inc." 
}, "bugs": { "url": "https://github.com/npm/agent/issues" }, "dependencies": { "agent-base": "^7.1.0", "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.1", "lru-cache": "^10.0.1", "socks-proxy-agent": "^8.0.3" }, "description": "the http/https agent used by the npm cli", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.21.3", "minipass-fetch": "^3.0.3", "nock": "^13.2.7", "semver": "^7.5.4", "simple-socks": "^3.1.0", "tap": "^16.3.0" }, "engines": { "node": "^16.14.0 || >=18.0.0" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/agent#readme", "license": "ISC", "main": "lib/index.js", "name": "@npmcli/agent", "repository": { "type": "git", "url": "git+https://github.com/npm/agent.git" }, "scripts": { "gencerts": "bash scripts/create-cert.sh", "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "posttest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap" }, "tap": { "nyc-arg": [ "--exclude", "tap-snapshots/**" ] }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.21.3", "publish": "true" }, "version": "2.2.2" } PK]�\�c��agent/lib/proxy.jsnu�[���'use strict' const { HttpProxyAgent } = require('http-proxy-agent') const { HttpsProxyAgent } = require('https-proxy-agent') const { SocksProxyAgent } = require('socks-proxy-agent') const { LRUCache } = require('lru-cache') const { InvalidProxyProtocolError } = require('./errors.js') const PROXY_CACHE = new LRUCache({ max: 20 }) const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols) const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy']) const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => { key = key.toLowerCase() if (PROXY_ENV_KEYS.has(key)) { acc[key] = value } return acc }, {}) const getProxyAgent = (url) => { url = new URL(url) const protocol = url.protocol.slice(0, -1) if (SOCKS_PROTOCOLS.has(protocol)) { return SocksProxyAgent } if (protocol === 'https' || protocol === 'http') { return [HttpProxyAgent, HttpsProxyAgent] } throw new InvalidProxyProtocolError(url) } const isNoProxy = (url, noProxy) => { if (typeof noProxy === 'string') { noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean) } if (!noProxy || !noProxy.length) { return false } const hostSegments = url.hostname.split('.').reverse() return noProxy.some((no) => { const noSegments = no.split('.').filter(Boolean).reverse() if (!noSegments.length) { return false } for (let i = 0; i < noSegments.length; i++) { if (hostSegments[i] !== noSegments[i]) { return false } } return true }) } const getProxy = (url, { proxy, noProxy }) => { url = new URL(url) if (!proxy) { proxy = url.protocol === 'https:' ? 
PROXY_ENV.https_proxy : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy } if (!noProxy) { noProxy = PROXY_ENV.no_proxy } if (!proxy || isNoProxy(url, noProxy)) { return null } return new URL(proxy) } module.exports = { getProxyAgent, getProxy, proxyCache: PROXY_CACHE, } PK]�\0��**agent/lib/agents.jsnu�[���'use strict' const net = require('net') const tls = require('tls') const { once } = require('events') const timers = require('timers/promises') const { normalizeOptions, cacheOptions } = require('./options') const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js') const Errors = require('./errors.js') const { Agent: AgentBase } = require('agent-base') module.exports = class Agent extends AgentBase { #options #timeouts #proxy #noProxy #ProxyAgent constructor (options = {}) { const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options) super(normalizedOptions) this.#options = normalizedOptions this.#timeouts = timeouts if (proxy) { this.#proxy = new URL(proxy) this.#noProxy = noProxy this.#ProxyAgent = getProxyAgent(proxy) } } get proxy () { return this.#proxy ? { url: this.#proxy } : {} } #getProxy (options) { if (!this.#proxy) { return } const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, { proxy: this.#proxy, noProxy: this.#noProxy, }) if (!proxy) { return } const cacheKey = cacheOptions({ ...options, ...this.#options, timeouts: this.#timeouts, proxy, }) if (proxyCache.has(cacheKey)) { return proxyCache.get(cacheKey) } let ProxyAgent = this.#ProxyAgent if (Array.isArray(ProxyAgent)) { ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0] } const proxyAgent = new ProxyAgent(proxy, { ...this.#options, socketOptions: { family: this.#options.family }, }) proxyCache.set(cacheKey, proxyAgent) return proxyAgent } // takes an array of promises and races them against the connection timeout // which will throw the necessary error if it is hit. 
This will return the // result of the promise race. async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) { if (timeout) { const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal }) .then(() => { throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`) }).catch((err) => { if (err.name === 'AbortError') { return } throw err }) promises.push(connectionTimeout) } let result try { result = await Promise.race(promises) ac.abort() } catch (err) { ac.abort() throw err } return result } async connect (request, options) { // if the connection does not have its own lookup function // set, then use the one from our options options.lookup ??= this.#options.lookup let socket let timeout = this.#timeouts.connection const isSecureEndpoint = this.isSecureEndpoint(options) const proxy = this.#getProxy(options) if (proxy) { // some of the proxies will wait for the socket to fully connect before // returning so we have to await this while also racing it against the // connection timeout. const start = Date.now() socket = await this.#timeoutConnection({ options, timeout, promises: [proxy.connect(request, options)], }) // see how much time proxy.connect took and subtract it from // the timeout if (timeout) { timeout = timeout - (Date.now() - start) } } else { socket = (isSecureEndpoint ? tls : net).connect(options) } socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs) socket.setNoDelay(this.keepAlive) const abortController = new AbortController() const { signal } = abortController const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting'] ? once(socket, isSecureEndpoint ? 
'secureConnect' : 'connect', { signal }) : Promise.resolve() await this.#timeoutConnection({ options, timeout, promises: [ connectPromise, once(socket, 'error', { signal }).then((err) => { throw err[0] }), ], }, abortController) if (this.#timeouts.idle) { socket.setTimeout(this.#timeouts.idle, () => { socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`)) }) } return socket } addRequest (request, options) { const proxy = this.#getProxy(options) // it would be better to call proxy.addRequest here but this causes the // http-proxy-agent to call its super.addRequest which causes the request // to be added to the agent twice. since we only support 3 agents // currently (see the required agents in proxy.js) we have manually // checked that the only public methods we need to call are called in the // next block. this could change in the future and presumably we would get // failing tests until we have properly called the necessary methods on // each of our proxy agents if (proxy?.setRequestProps) { proxy.setRequestProps(request, options) } request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close') if (this.#timeouts.response) { let responseTimeout request.once('finish', () => { setTimeout(() => { request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy)) }, this.#timeouts.response) }) request.once('response', () => { clearTimeout(responseTimeout) }) } if (this.#timeouts.transfer) { let transferTimeout request.once('response', (res) => { setTimeout(() => { res.destroy(new Errors.TransferTimeoutError(request, this.#proxy)) }, this.#timeouts.transfer) res.once('close', () => { clearTimeout(transferTimeout) }) }) } return super.addRequest(request, options) } } PK]�\��X���agent/lib/dns.jsnu�[���'use strict' const { LRUCache } = require('lru-cache') const dns = require('dns') // this is a factory so that each request can have its own opts (i.e. 
ttl) // while still sharing the cache across all requests const cache = new LRUCache({ max: 50 }) const getOptions = ({ family = 0, hints = dns.ADDRCONFIG, all = false, verbatim = undefined, ttl = 5 * 60 * 1000, lookup = dns.lookup, }) => ({ // hints and lookup are returned since both are top level properties to (net|tls).connect hints, lookup: (hostname, ...args) => { const callback = args.pop() // callback is always last arg const lookupOptions = args[0] ?? {} const options = { family, hints, all, verbatim, ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions), } const key = JSON.stringify({ hostname, ...options }) if (cache.has(key)) { const cached = cache.get(key) return process.nextTick(callback, null, ...cached) } lookup(hostname, options, (err, ...result) => { if (err) { return callback(err) } cache.set(key, result, { ttl }) return callback(null, ...result) }) }, }) module.exports = { cache, getOptions, } PK]�\7�e���agent/lib/index.jsnu�[���'use strict' const { LRUCache } = require('lru-cache') const { normalizeOptions, cacheOptions } = require('./options') const { getProxy, proxyCache } = require('./proxy.js') const dns = require('./dns.js') const Agent = require('./agents.js') const agentCache = new LRUCache({ max: 20 }) const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => { // false has meaning so this can't be a simple truthiness check if (agent != null) { return agent } url = new URL(url) const proxyForUrl = getProxy(url, { proxy, noProxy }) const normalizedOptions = { ...normalizeOptions(options), proxy: proxyForUrl, } const cacheKey = cacheOptions({ ...normalizedOptions, secureEndpoint: url.protocol === 'https:', }) if (agentCache.has(cacheKey)) { return agentCache.get(cacheKey) } const newAgent = new Agent(normalizedOptions) agentCache.set(cacheKey, newAgent) return newAgent } module.exports = { getAgent, Agent, // these are exported for backwards compatability HttpAgent: Agent, HttpsAgent: Agent, cache: 
{ proxy: proxyCache, agent: agentCache, dns: dns.cache, clear: () => { proxyCache.clear() agentCache.clear() dns.cache.clear() }, }, } PK]�\��!\rragent/lib/errors.jsnu�[���'use strict' class InvalidProxyProtocolError extends Error { constructor (url) { super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``) this.code = 'EINVALIDPROXY' this.proxy = url } } class ConnectionTimeoutError extends Error { constructor (host) { super(`Timeout connecting to host \`${host}\``) this.code = 'ECONNECTIONTIMEOUT' this.host = host } } class IdleTimeoutError extends Error { constructor (host) { super(`Idle timeout reached for host \`${host}\``) this.code = 'EIDLETIMEOUT' this.host = host } } class ResponseTimeoutError extends Error { constructor (request, proxy) { let msg = 'Response timeout ' if (proxy) { msg += `from proxy \`${proxy.host}\` ` } msg += `connecting to host \`${request.host}\`` super(msg) this.code = 'ERESPONSETIMEOUT' this.proxy = proxy this.request = request } } class TransferTimeoutError extends Error { constructor (request, proxy) { let msg = 'Transfer timeout ' if (proxy) { msg += `from proxy \`${proxy.host}\` ` } msg += `for \`${request.host}\`` super(msg) this.code = 'ETRANSFERTIMEOUT' this.proxy = proxy this.request = request } } module.exports = { InvalidProxyProtocolError, ConnectionTimeoutError, IdleTimeoutError, ResponseTimeoutError, TransferTimeoutError, } PK]�\�x�� � agent/lib/options.jsnu�[���'use strict' const dns = require('./dns') const normalizeOptions = (opts) => { const family = parseInt(opts.family ?? '0', 10) const keepAlive = opts.keepAlive ?? true const normalized = { // nodejs http agent options. these are all the defaults // but kept here to increase the likelihood of cache hits // https://nodejs.org/api/http.html#new-agentoptions keepAliveMsecs: keepAlive ? 1000 : undefined, maxSockets: opts.maxSockets ?? 15, maxTotalSockets: Infinity, maxFreeSockets: keepAlive ? 
256 : undefined, scheduling: 'fifo', // then spread the rest of the options ...opts, // we already set these to their defaults that we want family, keepAlive, // our custom timeout options timeouts: { // the standard timeout option is mapped to our idle timeout // and then deleted below idle: opts.timeout ?? 0, connection: 0, response: 0, transfer: 0, ...opts.timeouts, }, // get the dns options that go at the top level of socket connection ...dns.getOptions({ family, ...opts.dns }), } // remove timeout since we already used it to set our own idle timeout delete normalized.timeout return normalized } const createKey = (obj) => { let key = '' const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0]) for (let [k, v] of sorted) { if (v == null) { v = 'null' } else if (v instanceof URL) { v = v.toString() } else if (typeof v === 'object') { v = createKey(v) } key += `${k}:${v}:` } return key } const cacheOptions = ({ secureEndpoint, ...options }) => createKey({ secureEndpoint: !!secureEndpoint, // socket connect options family: options.family, hints: options.hints, localAddress: options.localAddress, // tls specific connect options strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false, ca: secureEndpoint ? options.ca : null, cert: secureEndpoint ? options.cert : null, key: secureEndpoint ? 
options.key : null, // http agent options keepAlive: options.keepAlive, keepAliveMsecs: options.keepAliveMsecs, maxSockets: options.maxSockets, maxTotalSockets: options.maxTotalSockets, maxFreeSockets: options.maxFreeSockets, scheduling: options.scheduling, // timeout options timeouts: options.timeouts, // proxy proxy: options.proxy, }) module.exports = { normalizeOptions, cacheOptions, } PK]�\&���@@promise-spawn/package.jsonnu�[���{ "_id": "@npmcli/promise-spawn@7.0.2", "_inBundle": true, "_location": "/npm/@npmcli/promise-spawn", "_phantomChildren": {}, "_requiredBy": [ "/npm", "/npm/@npmcli/git", "/npm/@npmcli/run-script", "/npm/pacote" ], "author": { "name": "GitHub Inc." }, "bugs": { "url": "https://github.com/npm/promise-spawn/issues" }, "dependencies": { "which": "^4.0.0" }, "description": "spawn processes the way the npm cli likes to do", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.22.0", "spawk": "^1.7.1", "tap": "^16.0.1" }, "engines": { "node": "^16.14.0 || >=18.0.0" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/promise-spawn#readme", "license": "ISC", "main": "./lib/index.js", "name": "@npmcli/promise-spawn", "repository": { "type": "git", "url": "git+https://github.com/npm/promise-spawn.git" }, "scripts": { "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "postsnap": "npm run lintfix --", "posttest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap" }, "tap": { "check-coverage": true, "nyc-arg": [ "--exclude", "tap-snapshots/**" ] }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.22.0", "publish": true }, "version": "7.0.2" } PK]�\�>�promise-spawn/lib/escape.jsnu�[���'use strict' // eslint-disable-next-line max-len // this code adapted from: https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/ const cmd = (input, doubleEscape) => { if (!input.length) { return '""' } let result if (!/[ \t\n\v"]/.test(input)) { result = input } else { result = '"' for (let i = 0; i <= input.length; ++i) { let slashCount = 0 while (input[i] === '\\') { ++i ++slashCount } if (i === input.length) { result += '\\'.repeat(slashCount * 2) break } if (input[i] === '"') { result += '\\'.repeat(slashCount * 2 + 1) result += input[i] } else { result += '\\'.repeat(slashCount) result += input[i] } } result += '"' } // and finally, prefix shell meta chars with a ^ result = result.replace(/[ !%^&()<>|"]/g, '^$&') if (doubleEscape) { result = result.replace(/[ !%^&()<>|"]/g, '^$&') } return result } const sh = (input) => { if (!input.length) { return `''` } if (!/[\t\n\r "#$&'()*;<>?\\`|~]/.test(input)) { return input } // replace single quotes with '\'' and wrap the whole result in a fresh set of quotes const result = `'${input.replace(/'/g, `'\\''`)}'` // if the input string already had single quotes around it, clean those up .replace(/^(?:'')+(?!$)/, '') .replace(/\\'''/g, `\\'`) return result } module.exports = { cmd, sh, } PK]�\A�*���promise-spawn/lib/index.jsnu�[���'use strict' const { spawn } = require('child_process') const os = require('os') const which = require('which') const escape = require('./escape.js') // 'extra' object is for decorating the error a bit more const promiseSpawn = (cmd, args, opts = {}, extra = {}) => { if (opts.shell) { return spawnWithShell(cmd, args, opts, extra) } let resolve, reject const promise = new Promise((_resolve, _reject) => { resolve = _resolve reject = _reject }) // Create error here so we have a more useful stack 
trace when rejecting const closeError = new Error('command failed') const stdout = [] const stderr = [] const getResult = (result) => ({ cmd, args, ...result, ...stdioResult(stdout, stderr, opts), ...extra, }) const rejectWithOpts = (er, erOpts) => { const resultError = getResult(erOpts) reject(Object.assign(er, resultError)) } const proc = spawn(cmd, args, opts) promise.stdin = proc.stdin promise.process = proc proc.on('error', rejectWithOpts) if (proc.stdout) { proc.stdout.on('data', c => stdout.push(c)) proc.stdout.on('error', rejectWithOpts) } if (proc.stderr) { proc.stderr.on('data', c => stderr.push(c)) proc.stderr.on('error', rejectWithOpts) } proc.on('close', (code, signal) => { if (code || signal) { rejectWithOpts(closeError, { code, signal }) } else { resolve(getResult({ code, signal })) } }) return promise } const spawnWithShell = (cmd, args, opts, extra) => { let command = opts.shell // if shell is set to true, we use a platform default. we can't let the core // spawn method decide this for us because we need to know what shell is in use // ahead of time so that we can escape arguments properly. we don't need coverage here. if (command === true) { // istanbul ignore next command = process.platform === 'win32' ? 
process.env.ComSpec : 'sh' } const options = { ...opts, shell: false } const realArgs = [] let script = cmd // first, determine if we're in windows because if we are we need to know if we're // running an .exe or a .cmd/.bat since the latter requires extra escaping const isCmd = /(?:^|\\)cmd(?:\.exe)?$/i.test(command) if (isCmd) { let doubleEscape = false // find the actual command we're running let initialCmd = '' let insideQuotes = false for (let i = 0; i < cmd.length; ++i) { const char = cmd.charAt(i) if (char === ' ' && !insideQuotes) { break } initialCmd += char if (char === '"' || char === "'") { insideQuotes = !insideQuotes } } let pathToInitial try { pathToInitial = which.sync(initialCmd, { path: (options.env && findInObject(options.env, 'PATH')) || process.env.PATH, pathext: (options.env && findInObject(options.env, 'PATHEXT')) || process.env.PATHEXT, }).toLowerCase() } catch (err) { pathToInitial = initialCmd.toLowerCase() } doubleEscape = pathToInitial.endsWith('.cmd') || pathToInitial.endsWith('.bat') for (const arg of args) { script += ` ${escape.cmd(arg, doubleEscape)}` } realArgs.push('/d', '/s', '/c', script) options.windowsVerbatimArguments = true } else { for (const arg of args) { script += ` ${escape.sh(arg)}` } realArgs.push('-c', script) } return promiseSpawn(command, realArgs, options, extra) } // open a file with the default application as defined by the user's OS const open = (_args, opts = {}, extra = {}) => { const options = { ...opts, shell: true } const args = [].concat(_args) let platform = process.platform // process.platform === 'linux' may actually indicate WSL, if that's the case // we want to treat things as win32 anyway so the host can open the argument if (platform === 'linux' && os.release().toLowerCase().includes('microsoft')) { platform = 'win32' } let command = options.command if (!command) { if (platform === 'win32') { // spawnWithShell does not do the additional os.release() check, so we // have to force the shell here to 
make sure we treat WSL as windows. options.shell = process.env.ComSpec // also, the start command accepts a title so to make sure that we don't // accidentally interpret the first arg as the title, we stick an empty // string immediately after the start command command = 'start ""' } else if (platform === 'darwin') { command = 'open' } else { command = 'xdg-open' } } return spawnWithShell(command, args, options, extra) } promiseSpawn.open = open const isPipe = (stdio = 'pipe', fd) => { if (stdio === 'pipe' || stdio === null) { return true } if (Array.isArray(stdio)) { return isPipe(stdio[fd], fd) } return false } const stdioResult = (stdout, stderr, { stdioString = true, stdio }) => { const result = { stdout: null, stderr: null, } // stdio is [stdin, stdout, stderr] if (isPipe(stdio, 1)) { result.stdout = Buffer.concat(stdout) if (stdioString) { result.stdout = result.stdout.toString().trim() } } if (isPipe(stdio, 2)) { result.stderr = Buffer.concat(stderr) if (stdioString) { result.stderr = result.stderr.toString().trim() } } return result } // case insensitive lookup in an object const findInObject = (obj, key) => { key = key.toLowerCase() for (const objKey of Object.keys(obj).sort()) { if (objKey.toLowerCase() === key) { return obj[objKey] } } } module.exports = promiseSpawn PK]�\|�q���promise-spawn/LICENSEnu�[���The ISC License Copyright (c) npm, Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. PK]�\�.� map-workspaces/package.jsonnu�[���{ "_id": "@npmcli/map-workspaces@3.0.6", "_inBundle": true, "_location": "/npm/@npmcli/map-workspaces", "_phantomChildren": {}, "_requiredBy": [ "/npm", "/npm/@npmcli/arborist", "/npm/@npmcli/config" ], "author": { "name": "GitHub Inc." }, "bugs": { "url": "https://github.com/npm/map-workspaces/issues" }, "dependencies": { "@npmcli/name-from-folder": "^2.0.0", "glob": "^10.2.2", "minimatch": "^9.0.0", "read-package-json-fast": "^3.0.0" }, "description": "Retrieves a name:pathname Map for a given workspaces config", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.21.3", "tap": "^16.0.1" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/map-workspaces#readme", "keywords": [ "npm", "npmcli", "libnpm", "cli", "workspaces", "map-workspaces" ], "license": "ISC", "main": "lib/index.js", "name": "@npmcli/map-workspaces", "repository": { "type": "git", "url": "git+https://github.com/npm/map-workspaces.git" }, "scripts": { "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "posttest": "npm run lint", "pretest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap" }, "tap": { "check-coverage": true, "nyc-arg": [ "--exclude", "tap-snapshots/**" ] }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.21.3", "publish": "true" }, "version": "3.0.6" } PK]�\=�.���map-workspaces/lib/index.jsnu�[���const path = require('path') const getName = require('@npmcli/name-from-folder') const { minimatch } = require('minimatch') const rpj = require('read-package-json-fast') const { glob } = require('glob') function appendNegatedPatterns (allPatterns) { const patterns = [] const negatedPatterns = [] for (let pattern of allPatterns) { const excl = pattern.match(/^!+/) if (excl) { pattern = pattern.slice(excl[0].length) } // strip off any / or ./ from the start of the pattern. /foo => foo pattern = pattern.replace(/^\.?\/+/, '') // an odd number of ! means a negated pattern. !!foo ==> foo const negate = excl && excl[0].length % 2 === 1 if (negate) { negatedPatterns.push(pattern) } else { // remove negated patterns that appeared before this pattern to avoid // ignoring paths that were matched afterwards // e.g: ['packages/**', '!packages/b/**', 'packages/b/a'] // in the above list, the last pattern overrides the negated pattern // right before it. In effect, the above list would become: // ['packages/**', 'packages/b/a'] // The order matters here which is why we must do it inside the loop // as opposed to doing it all together at the end. for (let i = 0; i < negatedPatterns.length; ++i) { const negatedPattern = negatedPatterns[i] if (minimatch(pattern, negatedPattern)) { negatedPatterns.splice(i, 1) } } patterns.push(pattern) } } // use the negated patterns to eagerly remove all the patterns that // can be removed to avoid unnecessary crawling for (const negated of negatedPatterns) { for (const pattern of minimatch.match(patterns, negated)) { patterns.splice(patterns.indexOf(pattern), 1) } } return { patterns, negatedPatterns } } function getPatterns (workspaces) { const workspacesDeclaration = Array.isArray(workspaces.packages) ? 
workspaces.packages : workspaces if (!Array.isArray(workspacesDeclaration)) { throw getError({ message: 'workspaces config expects an Array', code: 'EWORKSPACESCONFIG', }) } return appendNegatedPatterns(workspacesDeclaration) } function getPackageName (pkg, pathname) { const { name } = pkg return name || getName(pathname) } function pkgPathmame (opts) { return (...args) => { const cwd = opts.cwd ? opts.cwd : process.cwd() return path.join.apply(null, [cwd, ...args]) } } // make sure glob pattern only matches folders function getGlobPattern (pattern) { pattern = pattern.replace(/\\/g, '/') return pattern.endsWith('/') ? pattern : `${pattern}/` } function getError ({ Type = TypeError, message, code }) { return Object.assign(new Type(message), { code }) } function reverseResultMap (map) { return new Map(Array.from(map, item => item.reverse())) } async function mapWorkspaces (opts = {}) { if (!opts || !opts.pkg) { throw getError({ message: 'mapWorkspaces missing pkg info', code: 'EMAPWORKSPACESPKG', }) } const { workspaces = [] } = opts.pkg const { patterns, negatedPatterns } = getPatterns(workspaces) const results = new Map() const seen = new Map() if (!patterns.length && !negatedPatterns.length) { return results } const getGlobOpts = () => ({ ...opts, ignore: [ ...opts.ignore || [], '**/node_modules/**', // just ignore the negated patterns to avoid unnecessary crawling ...negatedPatterns, ], }) const getPackagePathname = pkgPathmame(opts) let matches = await glob(patterns.map((p) => getGlobPattern(p)), getGlobOpts()) // preserves glob@8 behavior matches = matches.sort((a, b) => a.localeCompare(b, 'en')) // we must preserve the order of results according to the given list of // workspace patterns const orderedMatches = [] for (const pattern of patterns) { orderedMatches.push(...matches.filter((m) => { return minimatch(m, pattern, { partial: true, windowsPathsNoEscape: true }) })) } for (const match of orderedMatches) { let pkg const packageJsonPathname = 
getPackagePathname(match, 'package.json') try { pkg = await rpj(packageJsonPathname) } catch (err) { if (err.code === 'ENOENT') { continue } else { throw err } } const packagePathname = path.dirname(packageJsonPathname) const name = getPackageName(pkg, packagePathname) let seenPackagePathnames = seen.get(name) if (!seenPackagePathnames) { seenPackagePathnames = new Set() seen.set(name, seenPackagePathnames) } seenPackagePathnames.add(packagePathname) } const errorMessageArray = ['must not have multiple workspaces with the same name'] for (const [packageName, seenPackagePathnames] of seen) { if (seenPackagePathnames.size > 1) { addDuplicateErrorMessages(errorMessageArray, packageName, seenPackagePathnames) } else { results.set(packageName, seenPackagePathnames.values().next().value) } } if (errorMessageArray.length > 1) { throw getError({ Type: Error, message: errorMessageArray.join('\n'), code: 'EDUPLICATEWORKSPACE', }) } return results } function addDuplicateErrorMessages (messageArray, packageName, packagePathnames) { messageArray.push( `package '${packageName}' has conflicts in the following paths:` ) for (const packagePathname of packagePathnames) { messageArray.push( ' ' + packagePathname ) } } mapWorkspaces.virtual = function (opts = {}) { if (!opts || !opts.lockfile) { throw getError({ message: 'mapWorkspaces.virtual missing lockfile info', code: 'EMAPWORKSPACESLOCKFILE', }) } const { packages = {} } = opts.lockfile const { workspaces = [] } = packages[''] || {} // uses a pathname-keyed map in order to negate the exact items const results = new Map() const { patterns, negatedPatterns } = getPatterns(workspaces) if (!patterns.length && !negatedPatterns.length) { return results } negatedPatterns.push('**/node_modules/**') const packageKeys = Object.keys(packages) for (const pattern of negatedPatterns) { for (const packageKey of minimatch.match(packageKeys, pattern)) { packageKeys.splice(packageKeys.indexOf(packageKey), 1) } } const getPackagePathname = 
pkgPathmame(opts) for (const pattern of patterns) { for (const packageKey of minimatch.match(packageKeys, pattern)) { const packagePathname = getPackagePathname(packageKey) const name = getPackageName(packages[packageKey], packagePathname) results.set(packagePathname, name) } } // Invert pathname-keyed to a proper name-to-pathnames Map return reverseResultMap(results) } module.exports = mapWorkspaces PK]�\�r��map-workspaces/LICENSE.mdnu�[���<!-- This file is automatically added by @npmcli/template-oss. Do not edit. --> ISC License Copyright npm, Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. PK]�\�� ��name-from-folder/package.jsonnu�[���{ "_id": "@npmcli/name-from-folder@2.0.0", "_inBundle": true, "_location": "/npm/@npmcli/name-from-folder", "_phantomChildren": {}, "_requiredBy": [ "/npm/@npmcli/arborist", "/npm/@npmcli/map-workspaces" ], "author": { "name": "GitHub Inc." 
}, "bugs": { "url": "https://github.com/npm/name-from-folder/issues" }, "description": "Get the package name from a folder path", "devDependencies": { "@npmcli/eslint-config": "^4.0.1", "@npmcli/template-oss": "4.11.0", "tap": "^16.3.2" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/name-from-folder#readme", "license": "ISC", "main": "lib/index.js", "name": "@npmcli/name-from-folder", "repository": { "type": "git", "url": "git+https://github.com/npm/name-from-folder.git" }, "scripts": { "lint": "eslint \"**/*.js\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "posttest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap" }, "tap": { "nyc-arg": [ "--exclude", "tap-snapshots/**" ] }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "version": "4.11.0" }, "version": "2.0.0" } PK]�\^�(O��name-from-folder/lib/index.jsnu�[���const { basename, dirname } = require('path') const getName = (parent, base) => parent.charAt(0) === '@' ? `${parent}/${base}` : base module.exports = dir => dir ? getName(basename(dirname(dir)), basename(dir)) : false PK]�\����name-from-folder/LICENSEnu�[���The ISC License Copyright npm, Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. PK]�\��Mzredact/package.jsonnu�[���{ "_id": "@npmcli/redact@2.0.0", "_inBundle": true, "_location": "/npm/@npmcli/redact", "_phantomChildren": {}, "_requiredBy": [ "/npm", "/npm/@npmcli/arborist", "/npm/npm-registry-fetch" ], "author": { "name": "GitHub Inc." }, "bugs": { "url": "https://github.com/npm/redact/issues" }, "description": "Redact sensitive npm information from output", "devDependencies": { "@npmcli/eslint-config": "^4.0.2", "@npmcli/template-oss": "4.21.3", "tap": "^16.3.10" }, "engines": { "node": "^16.14.0 || >=18.0.0" }, "exports": { ".": "./lib/index.js", "./server": "./lib/server.js", "./package.json": "./package.json" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/redact#readme", "keywords": [], "license": "ISC", "main": "lib/index.js", "name": "@npmcli/redact", "repository": { "type": "git", "url": "git+https://github.com/npm/redact.git" }, "scripts": { "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "posttest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap" }, "tap": { "nyc-arg": [ "--exclude", "tap-snapshots/**" ], "timeout": 120 }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.21.3", "publish": true }, "version": "2.0.0" } PK]�\���2ppredact/lib/utils.jsnu�[���const { URL_MATCHER, TYPE_URL, TYPE_REGEX, TYPE_PATH, } = require('./matchers') /** * creates a string of asterisks, * this forces a minimum asterisk for security purposes */ const asterisk = (length = 0) => { length = typeof length === 'string' ? length.length : length if (length < 8) { return '*'.repeat(8) } return '*'.repeat(length) } /** * escapes all special regex chars * @see https://stackoverflow.com/a/9310752 * @see https://github.com/tc39/proposal-regex-escaping */ const escapeRegExp = (text) => { return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, `\\$&`) } /** * provieds a regex "or" of the url versions of a string */ const urlEncodeRegexGroup = (value) => { const decoded = decodeURIComponent(value) const encoded = encodeURIComponent(value) const union = [...new Set([encoded, decoded, value])].map(escapeRegExp).join('|') return union } /** * a tagged template literal that returns a regex ensures all variables are excaped */ const urlEncodeRegexTag = (strings, ...values) => { let pattern = '' for (let i = 0; i < values.length; i++) { pattern += strings[i] + `(${urlEncodeRegexGroup(values[i])})` } pattern += strings[strings.length - 1] return new RegExp(pattern) } /** * creates a matcher for redacting url hostname */ const redactUrlHostnameMatcher = ({ hostname, replacement } = {}) => ({ type: TYPE_URL, predicate: ({ url }) => url.hostname === hostname, pattern: ({ url }) => { return urlEncodeRegexTag`(^${url.protocol}//${url.username}:.+@)?${url.hostname}` }, replacement: `$1${replacement || asterisk()}`, }) /** * creates a matcher for redacting url search / query parameter values */ const redactUrlSearchParamsMatcher = ({ param, replacement } = {}) => ({ type: TYPE_URL, predicate: ({ url }) => url.searchParams.has(param), pattern: ({ url }) => urlEncodeRegexTag`(${param}=)${url.searchParams.get(param)}`, replacement: `$1${replacement 
|| asterisk()}`, }) /** creates a matcher for redacting the url password */ const redactUrlPasswordMatcher = ({ replacement } = {}) => ({ type: TYPE_URL, predicate: ({ url }) => url.password, pattern: ({ url }) => urlEncodeRegexTag`(^${url.protocol}//${url.username}:)${url.password}`, replacement: `$1${replacement || asterisk()}`, }) const redactUrlReplacement = (...matchers) => (subValue) => { try { const url = new URL(subValue) return redactMatchers(...matchers)(subValue, { url }) } catch (err) { return subValue } } /** * creates a matcher / submatcher for urls, this function allows you to first * collect all urls within a larger string and then pass those urls to a * submatcher * * @example * console.log("this will first match all urls, then pass those urls to the password patcher") * redactMatchers(redactUrlMatcher(redactUrlPasswordMatcher())) * * @example * console.log( * "this will assume you are passing in a string that is a url, and will redact the password" * ) * redactMatchers(redactUrlPasswordMatcher()) * */ const redactUrlMatcher = (...matchers) => { return { ...URL_MATCHER, replacement: redactUrlReplacement(...matchers), } } const matcherFunctions = { [TYPE_REGEX]: (matcher) => (value) => { if (typeof value === 'string') { value = value.replace(matcher.pattern, matcher.replacement) } return value }, [TYPE_URL]: (matcher) => (value, ctx) => { if (typeof value === 'string') { try { const url = ctx?.url || new URL(value) const { predicate, pattern } = matcher const predicateValue = predicate({ url }) if (predicateValue) { value = value.replace(pattern({ url }), matcher.replacement) } } catch (_e) { return value } } return value }, [TYPE_PATH]: (matcher) => (value, ctx) => { const rawPath = ctx?.path const path = rawPath.join('.').toLowerCase() const { predicate, replacement } = matcher const replace = typeof replacement === 'function' ? 
replacement : () => replacement const shouldRun = predicate({ rawPath, path }) if (shouldRun) { value = replace(value, { rawPath, path }) } return value }, } /** converts a matcher to a function */ const redactMatcher = (matcher) => { return matcherFunctions[matcher.type](matcher) } /** converts a series of matchers to a function */ const redactMatchers = (...matchers) => (value, ctx) => { const flatMatchers = matchers.flat() return flatMatchers.reduce((result, matcher) => { const fn = (typeof matcher === 'function') ? matcher : redactMatcher(matcher) return fn(result, ctx) }, value) } /** * replacement handler, keeping $1 (if it exists) and replacing the * rest of the string with asterisks, maintaining string length */ const redactDynamicReplacement = () => (value, start) => { if (typeof start === 'number') { return asterisk(value) } return start + asterisk(value.substring(start.length).length) } /** * replacement handler, keeping $1 (if it exists) and replacing the * rest of the string with a fixed number of asterisks */ const redactFixedReplacement = (length) => (_value, start) => { if (typeof start === 'number') { return asterisk(length) } return start + asterisk(length) } const redactUrlPassword = (value, replacement) => { return redactMatchers(redactUrlPasswordMatcher({ replacement }))(value) } module.exports = { asterisk, escapeRegExp, urlEncodeRegexGroup, urlEncodeRegexTag, redactUrlHostnameMatcher, redactUrlSearchParamsMatcher, redactUrlPasswordMatcher, redactUrlMatcher, redactUrlReplacement, redactDynamicReplacement, redactFixedReplacement, redactMatchers, redactUrlPassword, } PK]�\�7�EEredact/lib/index.jsnu�[���const matchers = require('./matchers') const { redactUrlPassword } = require('./utils') const REPLACE = '***' const redact = (value) => { if (typeof value !== 'string' || !value) { return value } return redactUrlPassword(value, REPLACE) .replace(matchers.NPM_SECRET.pattern, `npm_${REPLACE}`) .replace(matchers.UUID.pattern, REPLACE) } // split on 
\s|= similar to how nopt parses options const splitAndRedact = (str) => { // stateful regex, don't move out of this scope const splitChars = /[\s=]/g let match = null let result = '' let index = 0 while (match = splitChars.exec(str)) { result += redact(str.slice(index, match.index)) + match[0] index = splitChars.lastIndex } return result + redact(str.slice(index)) } // replaces auth info in an array of arguments or in a strings const redactLog = (arg) => { if (typeof arg === 'string') { return splitAndRedact(arg) } else if (Array.isArray(arg)) { return arg.map((a) => typeof a === 'string' ? splitAndRedact(a) : a) } return arg } module.exports = { redact, redactLog, } PK]�\��.��redact/lib/server.jsnu�[���const { AUTH_HEADER, JSON_WEB_TOKEN, NPM_SECRET, DEEP_HEADER_AUTHORIZATION, DEEP_HEADER_SET_COOKIE, REWRITE_REQUEST, REWRITE_RESPONSE, } = require('./matchers') const { redactUrlMatcher, redactUrlPasswordMatcher, redactMatchers, } = require('./utils') const { deepMap } = require('./deep-map') const _redact = redactMatchers( NPM_SECRET, AUTH_HEADER, JSON_WEB_TOKEN, DEEP_HEADER_AUTHORIZATION, DEEP_HEADER_SET_COOKIE, REWRITE_REQUEST, REWRITE_RESPONSE, redactUrlMatcher( redactUrlPasswordMatcher() ) ) const redact = (input) => deepMap(input, (value, path) => _redact(value, { path })) module.exports = { redact } PK]�\F䖮 redact/lib/deep-map.jsnu�[���const deepMap = (input, handler = v => v, path = ['$'], seen = new Set([input])) => { if (Array.isArray(input)) { const result = [] for (let i = 0; i < input.length; i++) { const element = input[i] const elementPath = [...path, i] if (element instanceof Object) { if (!seen.has(element)) { // avoid getting stuck in circular reference seen.add(element) result.push(deepMap(handler(element, elementPath), handler, elementPath, seen)) } } else { result.push(handler(element, elementPath)) } } return result } if (input === null) { return null } else if (typeof input === 'object' || typeof input === 'function') { const result = {} if 
(input instanceof Error) { // `name` property is not included in `Object.getOwnPropertyNames(error)` result.errorType = input.name } for (const propertyName of Object.getOwnPropertyNames(input)) { // skip logging internal properties if (propertyName.startsWith('_')) { continue } try { const property = input[propertyName] const propertyPath = [...path, propertyName] if (property instanceof Object) { if (!seen.has(property)) { // avoid getting stuck in circular reference seen.add(property) result[propertyName] = deepMap( handler(property, propertyPath), handler, propertyPath, seen ) } } else { result[propertyName] = handler(property, propertyPath) } } catch (err) { // a getter may throw an error result[propertyName] = `[error getting value: ${err.message}]` } } return result } return handler(input, path) } module.exports = { deepMap } PK]�\����))redact/lib/matchers.jsnu�[���const TYPE_REGEX = 'regex' const TYPE_URL = 'url' const TYPE_PATH = 'path' const NPM_SECRET = { type: TYPE_REGEX, pattern: /\b(npms?_)[a-zA-Z0-9]{36,48}\b/gi, replacement: `[REDACTED_NPM_SECRET]`, } const AUTH_HEADER = { type: TYPE_REGEX, pattern: /\b(Basic\s+|Bearer\s+)[\w+=\-.]+\b/gi, replacement: `[REDACTED_AUTH_HEADER]`, } const JSON_WEB_TOKEN = { type: TYPE_REGEX, pattern: /\b[A-Za-z0-9-_]{10,}(?!\.\d+\.)\.[A-Za-z0-9-_]{3,}\.[A-Za-z0-9-_]{20,}\b/gi, replacement: `[REDACTED_JSON_WEB_TOKEN]`, } const UUID = { type: TYPE_REGEX, pattern: /\b[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\b/gi, replacement: `[REDACTED_UUID]`, } const URL_MATCHER = { type: TYPE_REGEX, pattern: /(?:https?|ftp):\/\/[^\s/"$.?#].[^\s"]*/gi, replacement: '[REDACTED_URL]', } const DEEP_HEADER_AUTHORIZATION = { type: TYPE_PATH, predicate: ({ path }) => path.endsWith('.headers.authorization'), replacement: '[REDACTED_HEADER_AUTHORIZATION]', } const DEEP_HEADER_SET_COOKIE = { type: TYPE_PATH, predicate: ({ path }) => path.endsWith('.headers.set-cookie'), replacement: '[REDACTED_HEADER_SET_COOKIE]', } const 
REWRITE_REQUEST = { type: TYPE_PATH, predicate: ({ path }) => path.endsWith('.request'), replacement: (input) => ({ method: input?.method, path: input?.path, headers: input?.headers, url: input?.url, }), } const REWRITE_RESPONSE = { type: TYPE_PATH, predicate: ({ path }) => path.endsWith('.response'), replacement: (input) => ({ data: input?.data, status: input?.status, headers: input?.headers, }), } module.exports = { TYPE_REGEX, TYPE_URL, TYPE_PATH, NPM_SECRET, AUTH_HEADER, JSON_WEB_TOKEN, UUID, URL_MATCHER, DEEP_HEADER_AUTHORIZATION, DEEP_HEADER_SET_COOKIE, REWRITE_REQUEST, REWRITE_RESPONSE, } PK]�\%��$$redact/LICENSEnu�[���MIT License Copyright (c) 2024 npm Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
PK]�\xI���'installed-package-contents/package.jsonnu�[���{ "_id": "@npmcli/installed-package-contents@2.1.0", "_inBundle": true, "_location": "/npm/@npmcli/installed-package-contents", "_phantomChildren": {}, "_requiredBy": [ "/npm/@npmcli/arborist", "/npm/libnpmdiff", "/npm/pacote" ], "author": { "name": "GitHub Inc." }, "bin": { "installed-package-contents": "bin/index.js" }, "bugs": { "url": "https://github.com/npm/installed-package-contents/issues" }, "dependencies": { "npm-bundled": "^3.0.0", "npm-normalize-package-bin": "^3.0.0" }, "description": "Get the list of files installed in a package in node_modules, including bundled dependencies", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.21.4", "tap": "^16.3.0" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/installed-package-contents#readme", "license": "ISC", "main": "lib/index.js", "name": "@npmcli/installed-package-contents", "repository": { "type": "git", "url": "git+https://github.com/npm/installed-package-contents.git" }, "scripts": { "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "posttest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap" }, "tap": { "nyc-arg": [ "--exclude", "tap-snapshots/**" ] }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.21.4", "publish": true }, "version": "2.1.0" } PK]�\.� ss'installed-package-contents/lib/index.jsnu�[���// to GET CONTENTS for folder at PATH (which may be a PACKAGE): // - if PACKAGE, read path/package.json // - if bins in ../node_modules/.bin, add those to result // - if depth >= maxDepth, add PATH to result, and finish // - readdir(PATH, with file types) // - add all FILEs in PATH to result // - if PARENT: // - if depth < maxDepth, add GET CONTENTS of all DIRs in PATH // - else, add all DIRs in PATH // - if no parent // - if no bundled deps, // - if depth < maxDepth, add GET CONTENTS of DIRs in path except // node_modules // - else, add all DIRs in path other than node_modules // - if has bundled deps, // - get list of bundled deps // - add GET CONTENTS of bundled deps, PACKAGE=true, depth + 1 const bundled = require('npm-bundled') const { readFile, readdir, stat } = require('fs/promises') const { resolve, basename, dirname } = require('path') const normalizePackageBin = require('npm-normalize-package-bin') const readPackage = ({ path, packageJsonCache }) => packageJsonCache.has(path) ? Promise.resolve(packageJsonCache.get(path)) : readFile(path).then(json => { const pkg = normalizePackageBin(JSON.parse(json)) packageJsonCache.set(path, pkg) return pkg }).catch(() => null) // just normalize bundle deps and bin, that's all we care about here. 
const normalized = Symbol('package data has been normalized') const rpj = ({ path, packageJsonCache }) => readPackage({ path, packageJsonCache }) .then(pkg => { if (!pkg || pkg[normalized]) { return pkg } if (pkg.bundledDependencies && !pkg.bundleDependencies) { pkg.bundleDependencies = pkg.bundledDependencies delete pkg.bundledDependencies } const bd = pkg.bundleDependencies if (bd === true) { pkg.bundleDependencies = [ ...Object.keys(pkg.dependencies || {}), ...Object.keys(pkg.optionalDependencies || {}), ] } if (typeof bd === 'object' && !Array.isArray(bd)) { pkg.bundleDependencies = Object.keys(bd) } pkg[normalized] = true return pkg }) const pkgContents = async ({ path, depth = 1, currentDepth = 0, pkg = null, result = null, packageJsonCache = null, }) => { if (!result) { result = new Set() } if (!packageJsonCache) { packageJsonCache = new Map() } if (pkg === true) { return rpj({ path: path + '/package.json', packageJsonCache }) .then(p => pkgContents({ path, depth, currentDepth, pkg: p, result, packageJsonCache, })) } if (pkg) { // add all bins to result if they exist if (pkg.bin) { const dir = dirname(path) const scope = basename(dir) const nm = /^@.+/.test(scope) ? dirname(dir) : dir const binFiles = [] Object.keys(pkg.bin).forEach(b => { const base = resolve(nm, '.bin', b) binFiles.push(base, base + '.cmd', base + '.ps1') }) const bins = await Promise.all( binFiles.map(b => stat(b).then(() => b).catch(() => null)) ) bins.filter(b => b).forEach(b => result.add(b)) } } if (currentDepth >= depth) { result.add(path) return result } // we'll need bundle list later, so get that now in parallel const [dirEntries, bundleDeps] = await Promise.all([ readdir(path, { withFileTypes: true }), currentDepth === 0 && pkg && pkg.bundleDependencies ? 
bundled({ path, packageJsonCache }) : null, ]).catch(() => []) // not a thing, probably a missing folder if (!dirEntries) { return result } // empty folder, just add the folder itself to the result if (!dirEntries.length && !bundleDeps && currentDepth !== 0) { result.add(path) return result } const recursePromises = [] for (const entry of dirEntries) { const p = resolve(path, entry.name) if (entry.isDirectory() === false) { result.add(p) continue } if (currentDepth !== 0 || entry.name !== 'node_modules') { if (currentDepth < depth - 1) { recursePromises.push(pkgContents({ path: p, packageJsonCache, depth, currentDepth: currentDepth + 1, result, })) } else { result.add(p) } continue } } if (bundleDeps) { // bundle deps are all folders // we always recurse to get pkg bins, but if currentDepth is too high, // it'll return early before walking their contents. recursePromises.push(...bundleDeps.map(dep => { const p = resolve(path, 'node_modules', dep) return pkgContents({ path: p, packageJsonCache, pkg: true, depth, currentDepth: currentDepth + 1, result, }) })) } if (recursePromises.length) { await Promise.all(recursePromises) } return result } module.exports = ({ path, ...opts }) => pkgContents({ path: resolve(path), ...opts, pkg: true, }).then(results => [...results]) PK]�\.9����"installed-package-contents/LICENSEnu�[���The ISC License Copyright (c) npm, Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. PK]�\?�UU'installed-package-contents/bin/index.jsnu�[���#! /usr/bin/env node const { relative } = require('path') const pkgContents = require('../') const usage = `Usage: installed-package-contents <path> [-d<n> --depth=<n>] Lists the files installed for a package specified by <path>. Options: -d<n> --depth=<n> Provide a numeric value ("Infinity" is allowed) to specify how deep in the file tree to traverse. Default=1 -h --help Show this usage information` const options = {} process.argv.slice(2).forEach(arg => { let match if ((match = arg.match(/^(?:--depth=|-d)([0-9]+|Infinity)/))) { options.depth = +match[1] } else if (arg === '-h' || arg === '--help') { console.log(usage) process.exit(0) } else { options.path = arg } }) if (!options.path) { console.error('ERROR: no path provided') console.error(usage) process.exit(1) } const cwd = process.cwd() pkgContents(options) .then(list => list.sort().forEach(p => console.log(relative(cwd, p)))) .catch(/* istanbul ignore next - pretty unusual */ er => { console.error(er) process.exit(1) }) PK]�\���^^$installed-package-contents/README.mdnu�[���# @npmcli/installed-package-contents Get the list of files installed in a package in node_modules, including bundled dependencies. This is useful if you want to remove a package node from the tree _without_ removing its child nodes, for example to extract a new version of the dependency into place safely. It's sort of the reflection of [npm-packlist](http://npm.im/npm-packlist), but for listing out the _installed_ files rather than the files that _will_ be installed. 
This is of course a much simpler operation, because we don't have to handle ignore files or package.json `files` lists. ## USAGE ```js // programmatic usage const pkgContents = require('@npmcli/installed-package-contents') pkgContents({ path: 'node_modules/foo', depth: 1 }).then(files => { // files is an array of items that need to be passed to // rimraf or moved out of the way to make the folder empty // if foo bundled dependencies, those will be included. // It will not traverse into child directories, because we set // depth:1 in the options. // If the folder doesn't exist, this returns an empty array. }) pkgContents({ path: 'node_modules/foo', depth: Infinity }).then(files => { // setting depth:Infinity tells it to keep walking forever // until it hits something that isn't a directory, so we'll // just get the list of all files, but not their containing // directories. }) ``` As a CLI: ```bash $ installed-package-contents node_modules/bundle-some -d1 node_modules/.bin/some node_modules/bundle-some/package.json node_modules/bundle-some/node_modules/@scope/baz node_modules/bundle-some/node_modules/.bin/foo node_modules/bundle-some/node_modules/foo ``` CLI options: ``` Usage: installed-package-contents <path> [-d<n> --depth=<n>] Lists the files installed for a package specified by <path>. Options: -d<n> --depth=<n> Provide a numeric value ("Infinity" is allowed) to specify how deep in the file tree to traverse. Default=1 -h --help Show this usage information ``` ## OPTIONS * `depth` Number, default `1`. How deep to traverse through folders to get contents. Typically you'd want to set this to either `1` (to get the surface files and folders) or `Infinity` (to get all files), but any other positive number is supported as well. If set to `0` or a negative number, returns the path provided and (if it is a package) its set of linked bins. * `path` Required. Path to the package in `node_modules` where traversal should begin. 
## RETURN VALUE A Promise that resolves to an array of fully-resolved files and folders matching the criteria. This includes all bundled dependencies in `node_modules`, and any linked executables in `node_modules/.bin` that the package caused to be installed. An empty or missing package folder will return an empty array. Empty directories _within_ package contents are listed, even if the `depth` argument would cause them to be traversed into. ## CAVEAT If using this module to generate a list of files that should be recursively removed to clear away the package, note that this will leave empty directories behind in certain cases: - If all child packages are bundled dependencies, then the `node_modules` folder will remain. - If all child packages within a given scope were bundled dependencies, then the `node_modules/@scope` folder will remain. - If all linked bin scripts were removed, then an empty `node_modules/.bin` folder will remain. In the interest of speed and algorithmic complexity, this module does _not_ do a subsequent readdir to see if it would remove all directory entries, though it would be easier to look at if it returned `node_modules` or `.bin` in that case rather than the contents. However, if the intent is to pass these arguments to `rimraf`, it hardly makes sense to do _two_ `readdir` calls just so that we can have the luxury of having to make a third. Since the primary use case is to delete a package's contents so that they can be re-filled with a new version of that package, this caveat does not pose a problem. Empty directories are already ignored by both npm and git. PK]�\ ��J��node-gyp/package.jsonnu�[���{ "_id": "@npmcli/node-gyp@3.0.0", "_inBundle": true, "_location": "/npm/@npmcli/node-gyp", "_phantomChildren": {}, "_requiredBy": [ "/npm/@npmcli/arborist", "/npm/@npmcli/run-script" ], "author": { "name": "GitHub Inc." 
}, "bugs": { "url": "https://github.com/npm/node-gyp/issues" }, "description": "Tools for dealing with node-gyp packages", "devDependencies": { "@npmcli/eslint-config": "^3.0.1", "@npmcli/template-oss": "4.5.1", "tap": "^16.0.1" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/node-gyp#readme", "keywords": [ "npm", "cli", "node-gyp" ], "license": "ISC", "main": "lib/index.js", "name": "@npmcli/node-gyp", "repository": { "type": "git", "url": "git+https://github.com/npm/node-gyp.git" }, "scripts": { "lint": "eslint \"**/*.js\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "posttest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap" }, "tap": { "nyc-arg": [ "--exclude", "tap-snapshots/**" ] }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "version": "4.5.1" }, "version": "3.0.0" } PK]�\�~!�]]node-gyp/lib/index.jsnu�[���const util = require('util') const fs = require('fs') const { stat } = fs.promises || { stat: util.promisify(fs.stat) } async function isNodeGypPackage (path) { return await stat(`${path}/binding.gyp`) .then(st => st.isFile()) .catch(() => false) } module.exports = { isNodeGypPackage, defaultGypInstallScript: 'node-gyp rebuild', } PK]�\n�r���fs/package.jsonnu�[���{ "_id": "@npmcli/fs@3.1.1", "_inBundle": true, "_location": "/npm/@npmcli/fs", "_phantomChildren": {}, "_requiredBy": [ "/npm", "/npm/@npmcli/arborist", "/npm/cacache" ], "author": { "name": "GitHub Inc." 
}, "bugs": { "url": "https://github.com/npm/fs/issues" }, "dependencies": { "semver": "^7.3.5" }, "description": "filesystem utilities for the npm cli", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.22.0", "tap": "^16.0.1" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/fs#readme", "keywords": [ "npm", "oss" ], "license": "ISC", "main": "lib/index.js", "name": "@npmcli/fs", "repository": { "type": "git", "url": "git+https://github.com/npm/fs.git" }, "scripts": { "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "npmclilint": "npmcli-lint", "postlint": "template-oss-check", "postsnap": "npm run lintfix --", "posttest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap" }, "tap": { "nyc-arg": [ "--exclude", "tap-snapshots/**" ] }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.22.0" }, "version": "3.1.1" } PK]�\�sY��fs/lib/move-file.jsnu�[���const { dirname, join, resolve, relative, isAbsolute } = require('path') const fs = require('fs/promises') const pathExists = async path => { try { await fs.access(path) return true } catch (er) { return er.code !== 'ENOENT' } } const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { if (!source || !destination) { throw new TypeError('`source` and `destination` file required') } options = { overwrite: true, ...options, } if (!options.overwrite && await pathExists(destination)) { throw new Error(`The destination file exists: ${destination}`) } await fs.mkdir(dirname(destination), { recursive: true }) try { await fs.rename(source, destination) } catch (error) { if (error.code === 'EXDEV' || error.code === 'EPERM') { const sourceStat = await fs.lstat(source) if (sourceStat.isDirectory()) { const files = await fs.readdir(source) await Promise.all(files.map((file) => moveFile(join(source, file), join(destination, file), options, false, symlinks) )) } else if (sourceStat.isSymbolicLink()) { symlinks.push({ source, destination }) } else { await fs.copyFile(source, destination) } } else { throw error } } if (root) { await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { let target = await fs.readlink(symSource) // junction symlinks in windows will be absolute paths, so we need to // make sure they point to the symlink destination if (isAbsolute(target)) { target = resolve(symDestination, relative(symSource, target)) } // try to determine what the actual file is so we can create the correct // type of symlink in windows let targetStat = 'file' try { targetStat = await fs.stat(resolve(dirname(symSource), target)) if (targetStat.isDirectory()) { targetStat = 'junction' } } catch { // targetStat remains 'file' } await fs.symlink( target, symDestination, targetStat ) })) await fs.rm(source, { 
recursive: true, force: true }) } } module.exports = moveFile PK]�\R]Sm�/�/fs/lib/cp/polyfill.jsnu�[���// this file is a modified version of the code in node 17.2.0 // which is, in turn, a modified version of the fs-extra module on npm // node core changes: // - Use of the assert module has been replaced with core's error system. // - All code related to the glob dependency has been removed. // - Bring your own custom fs module is not currently supported. // - Some basic code cleanup. // changes here: // - remove all callback related code // - drop sync support // - change assertions back to non-internal methods (see options.js) // - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows 'use strict' const { ERR_FS_CP_DIR_TO_NON_DIR, ERR_FS_CP_EEXIST, ERR_FS_CP_EINVAL, ERR_FS_CP_FIFO_PIPE, ERR_FS_CP_NON_DIR_TO_DIR, ERR_FS_CP_SOCKET, ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, ERR_FS_CP_UNKNOWN, ERR_FS_EISDIR, ERR_INVALID_ARG_TYPE, } = require('./errors.js') const { constants: { errno: { EEXIST, EISDIR, EINVAL, ENOTDIR, }, }, } = require('os') const { chmod, copyFile, lstat, mkdir, readdir, readlink, stat, symlink, unlink, utimes, } = require('fs/promises') const { dirname, isAbsolute, join, parse, resolve, sep, toNamespacedPath, } = require('path') const { fileURLToPath } = require('url') const defaultOptions = { dereference: false, errorOnExist: false, filter: undefined, force: true, preserveTimestamps: false, recursive: false, } async function cp (src, dest, opts) { if (opts != null && typeof opts !== 'object') { throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) } return cpFn( toNamespacedPath(getValidatedPath(src)), toNamespacedPath(getValidatedPath(dest)), { ...defaultOptions, ...opts }) } function getValidatedPath (fileURLOrPath) { const path = fileURLOrPath != null && fileURLOrPath.href && fileURLOrPath.origin ? 
fileURLToPath(fileURLOrPath) : fileURLOrPath return path } async function cpFn (src, dest, opts) { // Warn about using preserveTimestamps on 32-bit node // istanbul ignore next if (opts.preserveTimestamps && process.arch === 'ia32') { const warning = 'Using the preserveTimestamps option in 32-bit ' + 'node is not recommended' process.emitWarning(warning, 'TimestampPrecisionWarning') } const stats = await checkPaths(src, dest, opts) const { srcStat, destStat } = stats await checkParentPaths(src, srcStat, dest) if (opts.filter) { return handleFilter(checkParentDir, destStat, src, dest, opts) } return checkParentDir(destStat, src, dest, opts) } async function checkPaths (src, dest, opts) { const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) if (destStat) { if (areIdentical(srcStat, destStat)) { throw new ERR_FS_CP_EINVAL({ message: 'src and dest cannot be the same', path: dest, syscall: 'cp', errno: EINVAL, }) } if (srcStat.isDirectory() && !destStat.isDirectory()) { throw new ERR_FS_CP_DIR_TO_NON_DIR({ message: `cannot overwrite directory ${src} ` + `with non-directory ${dest}`, path: dest, syscall: 'cp', errno: EISDIR, }) } if (!srcStat.isDirectory() && destStat.isDirectory()) { throw new ERR_FS_CP_NON_DIR_TO_DIR({ message: `cannot overwrite non-directory ${src} ` + `with directory ${dest}`, path: dest, syscall: 'cp', errno: ENOTDIR, }) } } if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { throw new ERR_FS_CP_EINVAL({ message: `cannot copy ${src} to a subdirectory of self ${dest}`, path: dest, syscall: 'cp', errno: EINVAL, }) } return { srcStat, destStat } } function areIdentical (srcStat, destStat) { return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev } function getStats (src, dest, opts) { const statFunc = opts.dereference ? 
(file) => stat(file, { bigint: true }) : (file) => lstat(file, { bigint: true }) return Promise.all([ statFunc(src), statFunc(dest).catch((err) => { // istanbul ignore next: unsure how to cover. if (err.code === 'ENOENT') { return null } // istanbul ignore next: unsure how to cover. throw err }), ]) } async function checkParentDir (destStat, src, dest, opts) { const destParent = dirname(dest) const dirExists = await pathExists(destParent) if (dirExists) { return getStatsForCopy(destStat, src, dest, opts) } await mkdir(destParent, { recursive: true }) return getStatsForCopy(destStat, src, dest, opts) } function pathExists (dest) { return stat(dest).then( () => true, // istanbul ignore next: not sure when this would occur (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) } // Recursively check if dest parent is a subdirectory of src. // It works for all file types including symlinks since it // checks the src and dest inodes. It starts from the deepest // parent and stops once it reaches the src parent or the root path. async function checkParentPaths (src, srcStat, dest) { const srcParent = resolve(dirname(src)) const destParent = resolve(dirname(dest)) if (destParent === srcParent || destParent === parse(destParent).root) { return } let destStat try { destStat = await stat(destParent, { bigint: true }) } catch (err) { // istanbul ignore else: not sure when this would occur if (err.code === 'ENOENT') { return } // istanbul ignore next: not sure when this would occur throw err } if (areIdentical(srcStat, destStat)) { throw new ERR_FS_CP_EINVAL({ message: `cannot copy ${src} to a subdirectory of self ${dest}`, path: dest, syscall: 'cp', errno: EINVAL, }) } return checkParentPaths(src, srcStat, destParent) } const normalizePathToArray = (path) => resolve(path).split(sep).filter(Boolean) // Return true if dest is a subdir of src, otherwise false. // It only checks the path strings. 
function isSrcSubdir (src, dest) { const srcArr = normalizePathToArray(src) const destArr = normalizePathToArray(dest) return srcArr.every((cur, i) => destArr[i] === cur) } async function handleFilter (onInclude, destStat, src, dest, opts, cb) { const include = await opts.filter(src, dest) if (include) { return onInclude(destStat, src, dest, opts, cb) } } function startCopy (destStat, src, dest, opts) { if (opts.filter) { return handleFilter(getStatsForCopy, destStat, src, dest, opts) } return getStatsForCopy(destStat, src, dest, opts) } async function getStatsForCopy (destStat, src, dest, opts) { const statFn = opts.dereference ? stat : lstat const srcStat = await statFn(src) // istanbul ignore else: can't portably test FIFO if (srcStat.isDirectory() && opts.recursive) { return onDir(srcStat, destStat, src, dest, opts) } else if (srcStat.isDirectory()) { throw new ERR_FS_EISDIR({ message: `${src} is a directory (not copied)`, path: src, syscall: 'cp', errno: EINVAL, }) } else if (srcStat.isFile() || srcStat.isCharacterDevice() || srcStat.isBlockDevice()) { return onFile(srcStat, destStat, src, dest, opts) } else if (srcStat.isSymbolicLink()) { return onLink(destStat, src, dest) } else if (srcStat.isSocket()) { throw new ERR_FS_CP_SOCKET({ message: `cannot copy a socket file: ${dest}`, path: dest, syscall: 'cp', errno: EINVAL, }) } else if (srcStat.isFIFO()) { throw new ERR_FS_CP_FIFO_PIPE({ message: `cannot copy a FIFO pipe: ${dest}`, path: dest, syscall: 'cp', errno: EINVAL, }) } // istanbul ignore next: should be unreachable throw new ERR_FS_CP_UNKNOWN({ message: `cannot copy an unknown file type: ${dest}`, path: dest, syscall: 'cp', errno: EINVAL, }) } function onFile (srcStat, destStat, src, dest, opts) { if (!destStat) { return _copyFile(srcStat, src, dest, opts) } return mayCopyFile(srcStat, src, dest, opts) } async function mayCopyFile (srcStat, src, dest, opts) { if (opts.force) { await unlink(dest) return _copyFile(srcStat, src, dest, opts) } else if 
(opts.errorOnExist) { throw new ERR_FS_CP_EEXIST({ message: `${dest} already exists`, path: dest, syscall: 'cp', errno: EEXIST, }) } } async function _copyFile (srcStat, src, dest, opts) { await copyFile(src, dest) if (opts.preserveTimestamps) { return handleTimestampsAndMode(srcStat.mode, src, dest) } return setDestMode(dest, srcStat.mode) } async function handleTimestampsAndMode (srcMode, src, dest) { // Make sure the file is writable before setting the timestamp // otherwise open fails with EPERM when invoked with 'r+' // (through utimes call) if (fileIsNotWritable(srcMode)) { await makeFileWritable(dest, srcMode) return setDestTimestampsAndMode(srcMode, src, dest) } return setDestTimestampsAndMode(srcMode, src, dest) } function fileIsNotWritable (srcMode) { return (srcMode & 0o200) === 0 } function makeFileWritable (dest, srcMode) { return setDestMode(dest, srcMode | 0o200) } async function setDestTimestampsAndMode (srcMode, src, dest) { await setDestTimestamps(src, dest) return setDestMode(dest, srcMode) } function setDestMode (dest, srcMode) { return chmod(dest, srcMode) } async function setDestTimestamps (src, dest) { // The initial srcStat.atime cannot be trusted // because it is modified by the read(2) system call // (See https://nodejs.org/api/fs.html#fs_stat_time_values) const updatedSrcStat = await stat(src) return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) } function onDir (srcStat, destStat, src, dest, opts) { if (!destStat) { return mkDirAndCopy(srcStat.mode, src, dest, opts) } return copyDir(src, dest, opts) } async function mkDirAndCopy (srcMode, src, dest, opts) { await mkdir(dest) await copyDir(src, dest, opts) return setDestMode(dest, srcMode) } async function copyDir (src, dest, opts) { const dir = await readdir(src) for (let i = 0; i < dir.length; i++) { const item = dir[i] const srcItem = join(src, item) const destItem = join(dest, item) const { destStat } = await checkPaths(srcItem, destItem, opts) await startCopy(destStat, 
srcItem, destItem, opts) } } async function onLink (destStat, src, dest) { let resolvedSrc = await readlink(src) if (!isAbsolute(resolvedSrc)) { resolvedSrc = resolve(dirname(src), resolvedSrc) } if (!destStat) { return symlink(resolvedSrc, dest) } let resolvedDest try { resolvedDest = await readlink(dest) } catch (err) { // Dest exists and is a regular file or directory, // Windows may throw UNKNOWN error. If dest already exists, // fs throws error anyway, so no need to guard against it here. // istanbul ignore next: can only test on windows if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { return symlink(resolvedSrc, dest) } // istanbul ignore next: should not be possible throw err } if (!isAbsolute(resolvedDest)) { resolvedDest = resolve(dirname(dest), resolvedDest) } if (isSrcSubdir(resolvedSrc, resolvedDest)) { throw new ERR_FS_CP_EINVAL({ message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + `${resolvedDest}`, path: dest, syscall: 'cp', errno: EINVAL, }) } // Do not copy if src is a subdir of dest since unlinking // dest in this case would result in removing src contents // and therefore a broken symlink would be created. 
const srcStat = await stat(src) if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, path: dest, syscall: 'cp', errno: EINVAL, }) } return copyLink(resolvedSrc, dest) } async function copyLink (resolvedSrc, dest) { await unlink(dest) return symlink(resolvedSrc, dest) } module.exports = cp PK]�\���<<fs/lib/cp/LICENSEnu�[���(The MIT License) Copyright (c) 2011-2017 JP Richardson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
PK]�\��?��fs/lib/cp/index.jsnu�[���const fs = require('fs/promises') const getOptions = require('../common/get-options.js') const node = require('../common/node.js') const polyfill = require('./polyfill.js') // node 16.7.0 added fs.cp const useNative = node.satisfies('>=16.7.0') const cp = async (src, dest, opts) => { const options = getOptions(opts, { copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], }) // the polyfill is tested separately from this module, no need to hack // process.version to try to trigger it just for coverage // istanbul ignore next return useNative ? fs.cp(src, dest, options) : polyfill(src, dest, options) } module.exports = cp PK]�\`m�XD D fs/lib/cp/errors.jsnu�[���'use strict' const { inspect } = require('util') // adapted from node's internal/errors // https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js // close copy of node's internal SystemError class. class SystemError { constructor (code, prefix, context) { // XXX context.code is undefined in all constructors used in cp/polyfill // that may be a bug copied from node, maybe the constructor should use // `code` not `errno`? 
nodejs/node#41104 let message = `${prefix}: ${context.syscall} returned ` + `${context.code} (${context.message})` if (context.path !== undefined) { message += ` ${context.path}` } if (context.dest !== undefined) { message += ` => ${context.dest}` } this.code = code Object.defineProperties(this, { name: { value: 'SystemError', enumerable: false, writable: true, configurable: true, }, message: { value: message, enumerable: false, writable: true, configurable: true, }, info: { value: context, enumerable: true, configurable: true, writable: false, }, errno: { get () { return context.errno }, set (value) { context.errno = value }, enumerable: true, configurable: true, }, syscall: { get () { return context.syscall }, set (value) { context.syscall = value }, enumerable: true, configurable: true, }, }) if (context.path !== undefined) { Object.defineProperty(this, 'path', { get () { return context.path }, set (value) { context.path = value }, enumerable: true, configurable: true, }) } if (context.dest !== undefined) { Object.defineProperty(this, 'dest', { get () { return context.dest }, set (value) { context.dest = value }, enumerable: true, configurable: true, }) } } toString () { return `${this.name} [${this.code}]: ${this.message}` } [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { return inspect(this, { ...ctx, getters: true, customInspect: false, }) } } function E (code, message) { module.exports[code] = class NodeError extends SystemError { constructor (ctx) { super(code, message, ctx) } } } E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') E('ERR_FS_CP_EEXIST', 'Target already exists') E('ERR_FS_CP_EINVAL', 'Invalid src or dest') E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') E('ERR_FS_CP_UNKNOWN', 
'Cannot copy an unknown file type') E('ERR_FS_EISDIR', 'Path is a directory') module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { constructor (name, expected, actual) { super() this.code = 'ERR_INVALID_ARG_TYPE' this.message = `The ${name} argument must be ${expected}. Received ${typeof actual}` } } PK]�\��� ��fs/lib/readdir-scoped.jsnu�[���const { readdir } = require('fs/promises') const { join } = require('path') const readdirScoped = async (dir) => { const results = [] for (const item of await readdir(dir)) { if (item.startsWith('@')) { for (const scopedItem of await readdir(join(dir, item))) { results.push(join(item, scopedItem)) } } else { results.push(item) } } return results } module.exports = readdirScoped PK]�\I���fs/lib/index.jsnu�[���'use strict' const cp = require('./cp/index.js') const withTempDir = require('./with-temp-dir.js') const readdirScoped = require('./readdir-scoped.js') const moveFile = require('./move-file.js') module.exports = { cp, withTempDir, readdirScoped, moveFile, } PK]�\�L�΄�fs/lib/with-temp-dir.jsnu�[���const { join, sep } = require('path') const getOptions = require('./common/get-options.js') const { mkdir, mkdtemp, rm } = require('fs/promises') // create a temp directory, ensure its permissions match its parent, then call // the supplied function passing it the path to the directory. 
clean up after // the function finishes, whether it throws or not const withTempDir = async (root, fn, opts) => { const options = getOptions(opts, { copy: ['tmpPrefix'], }) // create the directory await mkdir(root, { recursive: true }) const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || '')) let err let result try { result = await fn(target) } catch (_err) { err = _err } try { await rm(target, { force: true, recursive: true }) } catch { // ignore errors } if (err) { throw err } return result } module.exports = withTempDir PK]�\�i��fs/lib/common/get-options.jsnu�[���// given an input that may or may not be an object, return an object that has // a copy of every defined property listed in 'copy'. if the input is not an // object, assign it to the property named by 'wrap' const getOptions = (input, { copy, wrap }) => { const result = {} if (input && typeof input === 'object') { for (const prop of copy) { if (input[prop] !== undefined) { result[prop] = input[prop] } } } else { result[wrap] = input } return result } module.exports = getOptions PK]�\�#���fs/lib/common/node.jsnu�[���const semver = require('semver') const satisfies = (range) => { return semver.satisfies(process.version, range, { includePrerelease: true }) } module.exports = { satisfies, } PK]�\�r�� fs/LICENSE.mdnu�[���<!-- This file is automatically added by @npmcli/template-oss. Do not edit. --> ISC License Copyright npm, Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. PK]�\\�dmmquery/package.jsonnu�[���{ "_id": "@npmcli/query@3.1.0", "_inBundle": true, "_location": "/npm/@npmcli/query", "_phantomChildren": {}, "_requiredBy": [ "/npm/@npmcli/arborist" ], "author": { "name": "GitHub Inc." }, "bugs": { "url": "https://github.com/npm/query/issues" }, "contributors": [ { "name": "Ruy Adorno", "url": "https://ruyadorno.com" } ], "dependencies": { "postcss-selector-parser": "^6.0.10" }, "description": "npm query parser and tools", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.21.3", "tap": "^16.2.0" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/query#readme", "keywords": [ "ast", "npm", "npmcli", "parser", "postcss", "postcss-selector-parser", "query" ], "license": "ISC", "main": "lib/index.js", "name": "@npmcli/query", "repository": { "type": "git", "url": "git+https://github.com/npm/query.git" }, "scripts": { "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "posttest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap" }, "tap": { "nyc-arg": [ "--exclude", "tap-snapshots/**" ] }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.21.3", "publish": true }, "version": "3.1.0" } PK]�\��붾 � query/lib/index.jsnu�[���'use strict' const parser = require('postcss-selector-parser') const arrayDelimiter = Symbol('arrayDelimiter') const escapeSlashes = str => str.replace(/\//g, '\\/') const unescapeSlashes = str => str.replace(/\\\//g, '/') // recursively fixes up any :attr pseudo-class found const fixupAttr = astNode => { const properties = [] const matcher = {} for (const selectorAstNode of astNode.nodes) { const [firstAstNode] = selectorAstNode.nodes if (firstAstNode.type === 'tag') { properties.push(firstAstNode.value) } } const lastSelectorAstNode = astNode.nodes.pop() const [attributeAstNode] = lastSelectorAstNode.nodes if (attributeAstNode.value === ':attr') { const appendParts = fixupAttr(attributeAstNode) properties.push(arrayDelimiter, ...appendParts.lookupProperties) matcher.qualifiedAttribute = appendParts.attributeMatcher.qualifiedAttribute matcher.operator = appendParts.attributeMatcher.operator matcher.value = appendParts.attributeMatcher.value // backwards compatibility matcher.attribute = appendParts.attributeMatcher.attribute if (appendParts.attributeMatcher.insensitive) { matcher.insensitive = true } } else { if (attributeAstNode.type !== 'attribute') { throw Object.assign( new Error('`:attr` pseudo-class expects an attribute matcher as the last value'), { code: 'EQUERYATTR' } ) } matcher.qualifiedAttribute = unescapeSlashes(attributeAstNode.qualifiedAttribute) matcher.operator = attributeAstNode.operator matcher.value = attributeAstNode.value // backwards compatibility matcher.attribute = matcher.qualifiedAttribute if (attributeAstNode.insensitive) { matcher.insensitive = true } } astNode.lookupProperties = properties astNode.attributeMatcher = matcher astNode.nodes.length = 0 return astNode } // fixed up nested pseudo nodes will have their internal selectors moved // to a new root node that will be referenced by the `nestedNode` property, 
// this tweak makes it simpler to reuse `retrieveNodesFromParsedAst` to // recursively parse and extract results from the internal selectors const fixupNestedPseudo = astNode => { // create a new ast root node and relocate any children // selectors of the current ast node to this new root const newRootNode = parser.root() astNode.nestedNode = newRootNode newRootNode.nodes = [...astNode.nodes] // clean up the ast by removing the children nodes from the // current ast node while also cleaning up their `parent` refs astNode.nodes.length = 0 for (const currAstNode of newRootNode.nodes) { currAstNode.parent = newRootNode } // recursively fixup nodes of any nested selector transformAst(newRootNode) } // :semver(<version|range|selector>, [version|range|selector], [function]) // note: the first or second parameter must be a static version or range const fixupSemverSpecs = astNode => { // if we have three nodes, the last is the semver function to use, pull that out first if (astNode.nodes.length === 3) { const funcNode = astNode.nodes.pop().nodes[0] if (funcNode.type === 'tag') { astNode.semverFunc = funcNode.value } else if (funcNode.type === 'string') { // a string is always in some type of quotes, we don't want those so slice them off astNode.semverFunc = funcNode.value.slice(1, -1) } else { // anything that isn't a tag or a string isn't a function name throw Object.assign( new Error('`:semver` pseudo-class expects a function name as last value'), { code: 'ESEMVERFUNC' } ) } } // now if we have 1 node, it's a static value // istanbul ignore else if (astNode.nodes.length === 1) { const semverNode = astNode.nodes.pop() astNode.semverValue = semverNode.nodes.reduce((res, next) => `${res}${String(next)}`, '') } else if (astNode.nodes.length === 2) { // and if we have two nodes, one of them is a static value and we need to determine which it is for (let i = 0; i < astNode.nodes.length; ++i) { const type = astNode.nodes[i].nodes[0].type // the type of the first child may be 
combinator for ranges, such as >14 if (type === 'tag' || type === 'combinator') { const semverNode = astNode.nodes.splice(i, 1)[0] astNode.semverValue = semverNode.nodes.reduce((res, next) => `${res}${String(next)}`, '') astNode.semverPosition = i break } } if (typeof astNode.semverValue === 'undefined') { throw Object.assign( new Error('`:semver` pseudo-class expects a static value in the first or second position'), { code: 'ESEMVERVALUE' } ) } } // if we got here, the last remaining child should be attribute selector if (astNode.nodes.length === 1) { fixupAttr(astNode) } else { // if we don't have a selector, we default to `[version]` astNode.attributeMatcher = { insensitive: false, attribute: 'version', qualifiedAttribute: 'version', } astNode.lookupProperties = [] } astNode.nodes.length = 0 } const fixupTypes = astNode => { const [valueAstNode] = astNode.nodes[0].nodes const { value } = valueAstNode || {} astNode.typeValue = value astNode.nodes.length = 0 } const fixupPaths = astNode => { astNode.pathValue = unescapeSlashes(String(astNode.nodes[0])) astNode.nodes.length = 0 } const fixupOutdated = astNode => { if (astNode.nodes.length) { astNode.outdatedKind = String(astNode.nodes[0]) astNode.nodes.length = 0 } } const fixupVuln = astNode => { const vulns = [] if (astNode.nodes.length) { for (const selector of astNode.nodes) { const vuln = {} for (const node of selector.nodes) { if (node.type !== 'attribute') { throw Object.assign( new Error(':vuln pseudo-class only accepts attribute matchers or "cwe" tag'), { code: 'EQUERYATTR' } ) } if (!['severity', 'cwe'].includes(node._attribute)) { throw Object.assign( new Error(':vuln pseudo-class only matches "severity" and "cwe" attributes'), { code: 'EQUERYATTR' } ) } if (!node.operator) { node.operator = '=' node.value = '*' } if (node.operator !== '=') { throw Object.assign( new Error(':vuln pseudo-class attribute selector only accepts "=" operator', node), { code: 'EQUERYATTR' } ) } if (!vuln[node._attribute]) { 
vuln[node._attribute] = [] } vuln[node._attribute].push(node._value) } vulns.push(vuln) } astNode.vulns = vulns astNode.nodes.length = 0 } } // a few of the supported ast nodes need to be tweaked in order to properly be // interpreted as proper arborist query selectors, namely semver ranges from // both ids and :semver pseudo-class selectors need to be translated from what // are usually multiple ast nodes, such as: tag:1, class:.0, class:.0 to a // single `1.0.0` value, other pseudo-class selectors also get preprocessed in // order to make it simpler to execute later when traversing each ast node // using rootNode.walk(), such as :path, :type, etc. transformAst handles all // these modifications to the parsed ast by doing an extra, initial traversal // of the parsed ast from the query and modifying the parsed nodes accordingly const transformAst = selector => { selector.walk((nextAstNode) => { switch (nextAstNode.value) { case ':attr': return fixupAttr(nextAstNode) case ':is': case ':has': case ':not': return fixupNestedPseudo(nextAstNode) case ':path': return fixupPaths(nextAstNode) case ':semver': return fixupSemverSpecs(nextAstNode) case ':type': return fixupTypes(nextAstNode) case ':outdated': return fixupOutdated(nextAstNode) case ':vuln': return fixupVuln(nextAstNode) } }) } const queryParser = (query) => { // if query is an empty string or any falsy // value, just returns an empty result if (!query) { return [] } return parser(transformAst) .astSync(escapeSlashes(query), { lossless: false }) } module.exports = { parser: queryParser, arrayDelimiter, } PK]�\�r�� query/LICENSEnu�[���<!-- This file is automatically added by @npmcli/template-oss. Do not edit. --> ISC License Copyright npm, Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. 
THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. PK]�\R��|��run-script/package.jsonnu�[���{ "_id": "@npmcli/run-script@8.1.0", "_inBundle": true, "_location": "/npm/@npmcli/run-script", "_phantomChildren": {}, "_requiredBy": [ "/npm", "/npm/@npmcli/arborist", "/npm/libnpmexec", "/npm/libnpmpack", "/npm/libnpmversion", "/npm/pacote" ], "author": { "name": "GitHub Inc." }, "bugs": { "url": "https://github.com/npm/run-script/issues" }, "dependencies": { "@npmcli/node-gyp": "^3.0.0", "@npmcli/package-json": "^5.0.0", "@npmcli/promise-spawn": "^7.0.0", "node-gyp": "^10.0.0", "proc-log": "^4.0.0", "which": "^4.0.0" }, "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.21.4", "spawk": "^1.8.1", "tap": "^16.0.1" }, "engines": { "node": "^16.14.0 || >=18.0.0" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/run-script#readme", "license": "ISC", "main": "lib/run-script.js", "name": "@npmcli/run-script", "repository": { "type": "git", "url": "git+https://github.com/npm/run-script.git" }, "scripts": { "eslint": "eslint", "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "posttest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap" }, "tap": { "nyc-arg": [ "--exclude", "tap-snapshots/**" ] }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.21.4", "publish": "true" }, "version": "8.1.0" } PK]�\ϖ�& run-script/lib/signal-manager.jsnu�[���const runningProcs = new Set() let handlersInstalled = false const forwardedSignals = [ 'SIGINT', 'SIGTERM', ] // no-op, this is so receiving the signal doesn't cause us to exit immediately // instead, we exit after all children have exited when we re-send the signal // to ourselves. see the catch handler at the bottom of run-script-pkg.js const handleSignal = signal => { for (const proc of runningProcs) { proc.kill(signal) } } const setupListeners = () => { for (const signal of forwardedSignals) { process.on(signal, handleSignal) } handlersInstalled = true } const cleanupListeners = () => { if (runningProcs.size === 0) { for (const signal of forwardedSignals) { process.removeListener(signal, handleSignal) } handlersInstalled = false } } const add = proc => { runningProcs.add(proc) if (!handlersInstalled) { setupListeners() } proc.once('exit', () => { runningProcs.delete(proc) cleanupListeners() }) } module.exports = { add, handleSignal, forwardedSignals, } PK]�\�;{run-script/lib/run-script.jsnu�[���const PackageJson = require('@npmcli/package-json') const runScriptPkg = require('./run-script-pkg.js') const validateOptions = require('./validate-options.js') const isServerPackage = require('./is-server-package.js') const runScript = async options => { validateOptions(options) if (options.pkg) { return runScriptPkg(options) } const { content: pkg } = await PackageJson.normalize(options.path) return runScriptPkg({ ...options, pkg }) } module.exports = Object.assign(runScript, { isServerPackage }) PK]�\�S�F//!run-script/lib/make-spawn-args.jsnu�[���/* eslint camelcase: "off" */ const setPATH = require('./set-path.js') const { resolve } = require('path') const npm_config_node_gyp = require.resolve('node-gyp/bin/node-gyp.js') const makeSpawnArgs = options => { const { event, path, scriptShell = true, binPaths, env, stdio, cmd, args, 
stdioString, } = options const spawnEnv = setPATH(path, binPaths, { // we need to at least save the PATH environment var ...process.env, ...env, npm_package_json: resolve(path, 'package.json'), npm_lifecycle_event: event, npm_lifecycle_script: cmd, npm_config_node_gyp, }) const spawnOpts = { env: spawnEnv, stdioString, stdio, cwd: path, shell: scriptShell, } return [cmd, args, spawnOpts] } module.exports = makeSpawnArgs PK]�\���C::run-script/lib/package-envs.jsnu�[���const packageEnvs = (vals, prefix, env = {}) => { for (const [key, val] of Object.entries(vals)) { if (val === undefined) { continue } else if (val === null || val === false) { env[`${prefix}${key}`] = '' } else if (Array.isArray(val)) { val.forEach((item, index) => { packageEnvs({ [`${key}_${index}`]: item }, `${prefix}`, env) }) } else if (typeof val === 'object') { packageEnvs(val, `${prefix}${key}_`, env) } else { env[`${prefix}${key}`] = String(val) } } return env } // https://github.com/npm/rfcs/pull/183 defines which fields we put into the environment module.exports = pkg => { return packageEnvs({ name: pkg.name, version: pkg.version, config: pkg.config, engines: pkg.engines, bin: pkg.bin, }, 'npm_package_') } PK]�\��GI""(run-script/lib/node-gyp-bin/node-gyp.cmdnu�[���@node "%npm_config_node_gyp%" %* PK]�\�ଳ33$run-script/lib/node-gyp-bin/node-gypnu�[���#!/usr/bin/env sh node "$npm_config_node_gyp" "$@" PK]�\n|K=��run-script/lib/set-path.jsnu�[���const { resolve, dirname, delimiter } = require('path') // the path here is relative, even though it does not need to be // in order to make the posix tests pass in windows const nodeGypPath = resolve(__dirname, '../lib/node-gyp-bin') // Windows typically calls its PATH environ 'Path', but this is not // guaranteed, nor is it guaranteed to be the only one. Merge them // all together in the order they appear in the object. 
const setPATH = (projectPath, binPaths, env) => { const PATH = Object.keys(env).filter(p => /^path$/i.test(p) && env[p]) .map(p => env[p].split(delimiter)) .reduce((set, p) => set.concat(p.filter(concatted => !set.includes(concatted))), []) .join(delimiter) const pathArr = [] if (binPaths) { pathArr.push(...binPaths) } // unshift the ./node_modules/.bin from every folder // walk up until dirname() does nothing, at the root // XXX we should specify a cwd that we don't go above let p = projectPath let pp do { pathArr.push(resolve(p, 'node_modules', '.bin')) pp = p p = dirname(p) } while (p !== pp) pathArr.push(nodeGypPath, PATH) const pathVal = pathArr.join(delimiter) // XXX include the node-gyp-bin path somehow? Probably better for // npm or arborist or whoever to just provide that by putting it in // the PATH environ, since that's preserved anyway. for (const key of Object.keys(env)) { if (/^path$/i.test(key)) { env[key] = pathVal } } return env } module.exports = setPATH PK]�\E�_��"run-script/lib/validate-options.jsnu�[���const validateOptions = options => { if (typeof options !== 'object' || !options) { throw new TypeError('invalid options object provided to runScript') } const { event, path, scriptShell, env = {}, stdio = 'pipe', args = [], cmd, } = options if (!event || typeof event !== 'string') { throw new TypeError('valid event not provided to runScript') } if (!path || typeof path !== 'string') { throw new TypeError('valid path not provided to runScript') } if (scriptShell !== undefined && typeof scriptShell !== 'string') { throw new TypeError('invalid scriptShell option provided to runScript') } if (typeof env !== 'object' || !env) { throw new TypeError('invalid env option provided to runScript') } if (typeof stdio !== 'string' && !Array.isArray(stdio)) { throw new TypeError('invalid stdio option provided to runScript') } if (!Array.isArray(args) || args.some(a => typeof a !== 'string')) { throw new TypeError('invalid args option provided to runScript') } 
if (cmd !== undefined && typeof cmd !== 'string') { throw new TypeError('invalid cmd option provided to runScript') } } module.exports = validateOptions PK]�\�O#�� run-script/lib/run-script-pkg.jsnu�[���const makeSpawnArgs = require('./make-spawn-args.js') const promiseSpawn = require('@npmcli/promise-spawn') const packageEnvs = require('./package-envs.js') const { isNodeGypPackage, defaultGypInstallScript } = require('@npmcli/node-gyp') const signalManager = require('./signal-manager.js') const isServerPackage = require('./is-server-package.js') const runScriptPkg = async options => { const { event, path, scriptShell, binPaths = false, env = {}, stdio = 'pipe', pkg, args = [], stdioString, // how long to wait for a process.kill signal // only exposed here so that we can make the test go a bit faster. signalTimeout = 500, } = options const { scripts = {}, gypfile } = pkg let cmd = null if (options.cmd) { cmd = options.cmd } else if (pkg.scripts && pkg.scripts[event]) { cmd = pkg.scripts[event] } else if ( // If there is no preinstall or install script, default to rebuilding node-gyp packages. 
event === 'install' && !scripts.install && !scripts.preinstall && gypfile !== false && await isNodeGypPackage(path) ) { cmd = defaultGypInstallScript } else if (event === 'start' && await isServerPackage(path)) { cmd = 'node server.js' } if (!cmd) { return { code: 0, signal: null } } let inputEnd = () => {} if (stdio === 'inherit') { let banner if (pkg._id) { banner = `\n> ${pkg._id} ${event}\n` } else { banner = `\n> ${event}\n` } banner += `> ${cmd.trim().replace(/\n/g, '\n> ')}` if (args.length) { banner += ` ${args.join(' ')}` } banner += '\n' const { output, input } = require('proc-log') output.standard(banner) inputEnd = input.start() } const [spawnShell, spawnArgs, spawnOpts] = makeSpawnArgs({ event, path, scriptShell, binPaths, env: { ...env, ...packageEnvs(pkg) }, stdio, cmd, args, stdioString, }) const p = promiseSpawn(spawnShell, spawnArgs, spawnOpts, { event, script: cmd, pkgid: pkg._id, path, }) if (stdio === 'inherit') { signalManager.add(p.process) } if (p.stdin) { p.stdin.end() } return p.catch(er => { const { signal } = er // coverage disabled because win32 never emits signals /* istanbul ignore next */ if (stdio === 'inherit' && signal) { // by the time we reach here, the child has already exited. we send the // signal back to ourselves again so that npm will exit with the same // status as the child process.kill(process.pid, signal) // just in case we don't die, reject after 500ms // this also keeps the node process open long enough to actually // get the signal, rather than terminating gracefully. 
return new Promise((res, rej) => setTimeout(() => rej(er), signalTimeout)) } else { throw er } }).finally(inputEnd) } module.exports = runScriptPkg PK]�\o<vX��#run-script/lib/is-server-package.jsnu�[���const { stat } = require('node:fs/promises') const { resolve } = require('node:path') module.exports = async path => { try { const st = await stat(resolve(path, 'server.js')) return st.isFile() } catch (er) { return false } } PK]�\.9����run-script/LICENSEnu�[���The ISC License Copyright (c) npm, Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. PK]�\�`�L��git/package.jsonnu�[���{ "_id": "@npmcli/git@5.0.7", "_inBundle": true, "_location": "/npm/@npmcli/git", "_phantomChildren": {}, "_requiredBy": [ "/npm/@npmcli/package-json", "/npm/libnpmversion", "/npm/pacote" ], "author": { "name": "GitHub Inc." 
}, "bugs": { "url": "https://github.com/npm/git/issues" }, "dependencies": { "@npmcli/promise-spawn": "^7.0.0", "lru-cache": "^10.0.1", "npm-pick-manifest": "^9.0.0", "proc-log": "^4.0.0", "promise-inflight": "^1.0.1", "promise-retry": "^2.0.1", "semver": "^7.3.5", "which": "^4.0.0" }, "description": "a util for spawning git from npm CLI contexts", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.22.0", "npm-package-arg": "^11.0.0", "slash": "^3.0.0", "tap": "^16.0.1" }, "engines": { "node": "^16.14.0 || >=18.0.0" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/git#readme", "license": "ISC", "main": "lib/index.js", "name": "@npmcli/git", "repository": { "type": "git", "url": "git+https://github.com/npm/git.git" }, "scripts": { "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "posttest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap" }, "tap": { "timeout": 600, "nyc-arg": [ "--exclude", "tap-snapshots/**" ] }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.22.0", "publish": true }, "version": "5.0.7" } PK]�\��E2mmgit/lib/utils.jsnu�[���const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32' exports.isWindows = isWindows PK]�\T1�I]]git/lib/make-error.jsnu�[���const { GitConnectionError, GitPathspecError, GitUnknownError, } = require('./errors.js') const connectionErrorRe = new RegExp([ 'remote error: Internal Server Error', 'The remote end hung up unexpectedly', 'Connection timed out', 'Operation timed out', 'Failed to connect to .* Timed out', 'Connection reset by peer', 'SSL_ERROR_SYSCALL', 'The requested URL returned error: 503', ].join('|')) const missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/ function makeError (er) { const message = er.stderr let gitEr if (connectionErrorRe.test(message)) { gitEr = new GitConnectionError(message) } else if (missingPathspecRe.test(message)) { gitEr = new GitPathspecError(message) } else { gitEr = new GitUnknownError(message) } return Object.assign(gitEr, er) } module.exports = makeError PK]�\�z�IIgit/lib/clone.jsnu�[���// The goal here is to minimize both git workload and // the number of refs we download over the network. // // Every method ends up with the checked out working dir // at the specified ref, and resolves with the git sha. // Only certain whitelisted hosts get shallow cloning. // Many hosts (including GHE) don't always support it. // A failed shallow fetch takes a LOT longer than a full // fetch in most cases, so we skip it entirely. // Set opts.gitShallow = true/false to force this behavior // one way or the other. 
const shallowHosts = new Set([ 'github.com', 'gist.github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org', ]) // we have to use url.parse until we add the same shim that hosted-git-info has // to handle scp:// urls const { parse } = require('url') // eslint-disable-line node/no-deprecated-api const path = require('path') const getRevs = require('./revs.js') const spawn = require('./spawn.js') const { isWindows } = require('./utils.js') const pickManifest = require('npm-pick-manifest') const fs = require('fs/promises') module.exports = (repo, ref = 'HEAD', target = null, opts = {}) => getRevs(repo, opts).then(revs => clone( repo, revs, ref, resolveRef(revs, ref, opts), target || defaultTarget(repo, opts.cwd), opts )) const maybeShallow = (repo, opts) => { if (opts.gitShallow === false || opts.gitShallow) { return opts.gitShallow } return shallowHosts.has(parse(repo).host) } const defaultTarget = (repo, /* istanbul ignore next */ cwd = process.cwd()) => path.resolve(cwd, path.basename(repo.replace(/[/\\]?\.git$/, ''))) const clone = (repo, revs, ref, revDoc, target, opts) => { if (!revDoc) { return unresolved(repo, ref, target, opts) } if (revDoc.sha === revs.refs.HEAD.sha) { return plain(repo, revDoc, target, opts) } if (revDoc.type === 'tag' || revDoc.type === 'branch') { return branch(repo, revDoc, target, opts) } return other(repo, revDoc, target, opts) } const resolveRef = (revs, ref, opts) => { const { spec = {} } = opts ref = spec.gitCommittish || ref /* istanbul ignore next - will fail anyway, can't pull */ if (!revs) { return null } if (spec.gitRange) { return pickManifest(revs, spec.gitRange, opts) } if (!ref) { return revs.refs.HEAD } if (revs.refs[ref]) { return revs.refs[ref] } if (revs.shas[ref]) { return revs.refs[revs.shas[ref][0]] } return null } // pull request or some other kind of advertised ref const other = (repo, revDoc, target, opts) => { const shallow = maybeShallow(repo, opts) const fetchOrigin = ['fetch', 'origin', revDoc.rawRef] 
.concat(shallow ? ['--depth=1'] : []) const git = (args) => spawn(args, { ...opts, cwd: target }) return fs.mkdir(target, { recursive: true }) .then(() => git(['init'])) .then(() => isWindows(opts) ? git(['config', '--local', '--add', 'core.longpaths', 'true']) : null) .then(() => git(['remote', 'add', 'origin', repo])) .then(() => git(fetchOrigin)) .then(() => git(['checkout', revDoc.sha])) .then(() => updateSubmodules(target, opts)) .then(() => revDoc.sha) } // tag or branches. use -b const branch = (repo, revDoc, target, opts) => { const args = [ 'clone', '-b', revDoc.ref, repo, target, '--recurse-submodules', ] if (maybeShallow(repo, opts)) { args.push('--depth=1') } if (isWindows(opts)) { args.push('--config', 'core.longpaths=true') } return spawn(args, opts).then(() => revDoc.sha) } // just the head. clone it const plain = (repo, revDoc, target, opts) => { const args = [ 'clone', repo, target, '--recurse-submodules', ] if (maybeShallow(repo, opts)) { args.push('--depth=1') } if (isWindows(opts)) { args.push('--config', 'core.longpaths=true') } return spawn(args, opts).then(() => revDoc.sha) } const updateSubmodules = async (target, opts) => { const hasSubmodules = await fs.stat(`${target}/.gitmodules`) .then(() => true) .catch(() => false) if (!hasSubmodules) { return null } return spawn([ 'submodule', 'update', '-q', '--init', '--recursive', ], { ...opts, cwd: target }) } const unresolved = (repo, ref, target, opts) => { // can't do this one shallowly, because the ref isn't advertised // but we can avoid checking out the working dir twice, at least const lp = isWindows(opts) ? 
['--config', 'core.longpaths=true'] : [] const cloneArgs = ['clone', '--mirror', '-q', repo, target + '/.git'] const git = (args) => spawn(args, { ...opts, cwd: target }) return fs.mkdir(target, { recursive: true }) .then(() => git(cloneArgs.concat(lp))) .then(() => git(['init'])) .then(() => git(['checkout', ref])) .then(() => updateSubmodules(target, opts)) .then(() => git(['rev-parse', '--revs-only', 'HEAD'])) .then(({ stdout }) => stdout.trim()) } PK]�\�y�<�� git/lib/is.jsnu�[���// not an airtight indicator, but a good gut-check to even bother trying const { stat } = require('fs/promises') module.exports = ({ cwd = process.cwd() } = {}) => stat(cwd + '/.git').then(() => true, () => false) PK]�\�]��git/lib/spawn.jsnu�[���const spawn = require('@npmcli/promise-spawn') const promiseRetry = require('promise-retry') const { log } = require('proc-log') const makeError = require('./make-error.js') const makeOpts = require('./opts.js') module.exports = (gitArgs, opts = {}) => { const whichGit = require('./which.js') const gitPath = whichGit(opts) if (gitPath instanceof Error) { return Promise.reject(gitPath) } // undocumented option, mostly only here for tests const args = opts.allowReplace || gitArgs[0] === '--no-replace-objects' ? 
gitArgs : ['--no-replace-objects', ...gitArgs] let retryOpts = opts.retry if (retryOpts === null || retryOpts === undefined) { retryOpts = { retries: opts.fetchRetries || 2, factor: opts.fetchRetryFactor || 10, maxTimeout: opts.fetchRetryMaxtimeout || 60000, minTimeout: opts.fetchRetryMintimeout || 1000, } } return promiseRetry((retryFn, number) => { if (number !== 1) { log.silly('git', `Retrying git command: ${ args.join(' ')} attempt # ${number}`) } return spawn(gitPath, args, makeOpts(opts)) .catch(er => { const gitError = makeError(er) if (!gitError.shouldRetry(number)) { throw gitError } retryFn(gitError) }) }, retryOpts) } PK]�\�@�K��git/lib/index.jsnu�[���module.exports = { clone: require('./clone.js'), revs: require('./revs.js'), spawn: require('./spawn.js'), is: require('./is.js'), find: require('./find.js'), isClean: require('./is-clean.js'), errors: require('./errors.js'), } PK]�\��o���git/lib/is-clean.jsnu�[���const spawn = require('./spawn.js') module.exports = (opts = {}) => spawn(['status', '--porcelain=v1', '-uno'], opts) .then(res => !res.stdout.trim().split(/\r?\n+/) .map(l => l.trim()).filter(l => l).length) PK]�\U� �CCgit/lib/errors.jsnu�[��� const maxRetry = 3 class GitError extends Error { shouldRetry () { return false } } class GitConnectionError extends GitError { constructor () { super('A git connection error occurred') } shouldRetry (number) { return number < maxRetry } } class GitPathspecError extends GitError { constructor () { super('The git reference could not be found') } } class GitUnknownError extends GitError { constructor () { super('An unknown git error occurred') } } module.exports = { GitConnectionError, GitPathspecError, GitUnknownError, } PK]�\�]n���git/lib/lines-to-revs.jsnu�[���// turn an array of lines from `git ls-remote` into a thing // vaguely resembling a packument, where docs are a resolved ref const semver = require('semver') module.exports = lines => finish(lines.reduce(linesToRevsReducer, { versions: {}, 
'dist-tags': {}, refs: {}, shas: {}, })) const finish = revs => distTags(shaList(peelTags(revs))) // We can check out shallow clones on specific SHAs if we have a ref const shaList = revs => { Object.keys(revs.refs).forEach(ref => { const doc = revs.refs[ref] if (!revs.shas[doc.sha]) { revs.shas[doc.sha] = [ref] } else { revs.shas[doc.sha].push(ref) } }) return revs } // Replace any tags with their ^{} counterparts, if those exist const peelTags = revs => { Object.keys(revs.refs).filter(ref => ref.endsWith('^{}')).forEach(ref => { const peeled = revs.refs[ref] const unpeeled = revs.refs[ref.replace(/\^\{\}$/, '')] if (unpeeled) { unpeeled.sha = peeled.sha delete revs.refs[ref] } }) return revs } const distTags = revs => { // not entirely sure what situations would result in an // ichabod repo, but best to be careful in Sleepy Hollow anyway const HEAD = revs.refs.HEAD || /* istanbul ignore next */ {} const versions = Object.keys(revs.versions) versions.forEach(v => { // simulate a dist-tags with latest pointing at the // 'latest' branch if one exists and is a version, // or HEAD if not. const ver = revs.versions[v] if (revs.refs.latest && ver.sha === revs.refs.latest.sha) { revs['dist-tags'].latest = v } else if (ver.sha === HEAD.sha) { revs['dist-tags'].HEAD = v if (!revs.refs.latest) { revs['dist-tags'].latest = v } } }) return revs } const refType = ref => { if (ref.startsWith('refs/tags/')) { return 'tag' } if (ref.startsWith('refs/heads/')) { return 'branch' } if (ref.startsWith('refs/pull/')) { return 'pull' } if (ref === 'HEAD') { return 'head' } // Could be anything, ignore for now /* istanbul ignore next */ return 'other' } // return the doc, or null if we should ignore it. 
const lineToRevDoc = line => { const split = line.trim().split(/\s+/, 2) if (split.length < 2) { return null } const sha = split[0].trim() const rawRef = split[1].trim() const type = refType(rawRef) if (type === 'tag') { // refs/tags/foo^{} is the 'peeled tag', ie the commit // that is tagged by refs/tags/foo they resolve to the same // content, just different objects in git's data structure. // But, we care about the thing the tag POINTS to, not the tag // object itself, so we only look at the peeled tag refs, and // ignore the pointer. // For now, though, we have to save both, because some tags // don't have peels, if they were not annotated. const ref = rawRef.slice('refs/tags/'.length) return { sha, ref, rawRef, type } } if (type === 'branch') { const ref = rawRef.slice('refs/heads/'.length) return { sha, ref, rawRef, type } } if (type === 'pull') { // NB: merged pull requests installable with #pull/123/merge // for the merged pr, or #pull/123 for the PR head const ref = rawRef.slice('refs/'.length).replace(/\/head$/, '') return { sha, ref, rawRef, type } } if (type === 'head') { const ref = 'HEAD' return { sha, ref, rawRef, type } } // at this point, all we can do is leave the ref un-munged return { sha, ref: rawRef, rawRef, type } } const linesToRevsReducer = (revs, line) => { const doc = lineToRevDoc(line) if (!doc) { return revs } revs.refs[doc.ref] = doc revs.refs[doc.rawRef] = doc if (doc.type === 'tag') { // try to pull a semver value out of tags like `release-v1.2.3` // which is a pretty common pattern. 
const match = !doc.ref.endsWith('^{}') && doc.ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/) if (match && semver.valid(match[1], true)) { revs.versions[semver.clean(match[1], true)] = doc } } return revs } PK]�\��s2SSgit/lib/which.jsnu�[���const which = require('which') let gitPath try { gitPath = which.sync('git') } catch { // ignore errors } module.exports = (opts = {}) => { if (opts.git) { return opts.git } if (!gitPath || opts.git === false) { return Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' }) } return gitPath } PK]�\[�l{<<git/lib/find.jsnu�[���const is = require('./is.js') const { dirname } = require('path') module.exports = async ({ cwd = process.cwd(), root } = {}) => { while (true) { if (await is({ cwd })) { return cwd } const next = dirname(cwd) if (cwd === root || cwd === next) { return null } cwd = next } } PK]�\�D���git/lib/revs.jsnu�[���const pinflight = require('promise-inflight') const spawn = require('./spawn.js') const { LRUCache } = require('lru-cache') const revsCache = new LRUCache({ max: 100, ttl: 5 * 60 * 1000, }) const linesToRevs = require('./lines-to-revs.js') module.exports = async (repo, opts = {}) => { if (!opts.noGitRevCache) { const cached = revsCache.get(repo) if (cached) { return cached } } return pinflight(`ls-remote:${repo}`, () => spawn(['ls-remote', repo], opts) .then(({ stdout }) => linesToRevs(stdout.trim().split('\n'))) .then(revs => { revsCache.set(repo, revs) return revs }) ) } PK]�\���Sssgit/lib/opts.jsnu�[���// Values we want to set if they're not already defined by the end user // This defaults to accepting new ssh host key fingerprints const gitEnv = { GIT_ASKPASS: 'echo', GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new', } module.exports = (opts = {}) => ({ stdioString: true, ...opts, shell: false, env: opts.env || { ...gitEnv, ...process.env }, }) PK]�\|�q���git/LICENSEnu�[���The ISC License Copyright (c) npm, Inc. 
Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. PK]�\-�yc��config/package.jsonnu�[���{ "_id": "@npmcli/config@8.3.3", "_inBundle": true, "_location": "/npm/@npmcli/config", "_phantomChildren": {}, "_requiredBy": [ "/npm" ], "author": { "name": "GitHub Inc." }, "bugs": { "url": "https://github.com/npm/cli/issues" }, "dependencies": { "@npmcli/map-workspaces": "^3.0.2", "ci-info": "^4.0.0", "ini": "^4.1.2", "nopt": "^7.2.1", "proc-log": "^4.2.0", "read-package-json-fast": "^3.0.2", "semver": "^7.3.5", "walk-up-path": "^3.0.1" }, "description": "Configuration management for the npm cli", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/mock-globals": "^1.0.0", "@npmcli/template-oss": "4.22.0", "tap": "^16.3.8" }, "engines": { "node": "^16.14.0 || >=18.0.0" }, "files": [ "bin/", "lib/" ], "homepage": "https://github.com/npm/cli#readme", "license": "ISC", "main": "lib/index.js", "name": "@npmcli/config", "repository": { "type": "git", "url": "git+https://github.com/npm/cli.git", "directory": "workspaces/config" }, "scripts": { "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", "lintfix": "npm run lint -- --fix", "postlint": "template-oss-check", "posttest": "npm run lint", "snap": "tap", "template-oss-apply": "template-oss-apply --force", "test": "tap" }, "tap": { "nyc-arg": [ "--exclude", "tap-snapshots/**" ] }, 
"templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "version": "4.22.0", "content": "../../scripts/template-oss/index.js" }, "version": "8.3.3" } PK]�\������config/lib/nerf-dart.jsnu�[���const { URL } = require('node:url') /** * Maps a URL to an identifier. * * Name courtesy schiffertronix media LLC, a New Jersey corporation * * @param {String} uri The URL to be nerfed. * * @returns {String} A nerfed URL. */ module.exports = (url) => { const parsed = new URL(url) const from = `${parsed.protocol}//${parsed.host}${parsed.pathname}` const rel = new URL('.', from) const res = `//${rel.host}${rel.pathname}` return res } PK]�\E���I I %config/lib/definitions/definitions.jsnu�[���const Definition = require('./definition.js') const ciInfo = require('ci-info') const querystring = require('node:querystring') const { join } = require('node:path') const isWindows = process.platform === 'win32' // used by cafile flattening to flatOptions.ca const { readFileSync } = require('node:fs') const maybeReadFile = file => { try { return readFileSync(file, 'utf8') } catch (er) { if (er.code !== 'ENOENT') { throw er } return null } } const buildOmitList = obj => { const include = obj.include || [] const omit = obj.omit || [] const only = obj.only if (/^prod(uction)?$/.test(only) || obj.production) { omit.push('dev') } else if (obj.production === false) { include.push('dev') } if (/^dev/.test(obj.also)) { include.push('dev') } if (obj.dev) { include.push('dev') } if (obj.optional === false) { omit.push('optional') } else if (obj.optional === true) { include.push('optional') } obj.omit = [...new Set(omit)].filter(type => !include.includes(type)) obj.include = [...new Set(include)] if (obj.omit.includes('dev')) { process.env.NODE_ENV = 'production' } return obj.omit } const editor = process.env.EDITOR || process.env.VISUAL || (isWindows ? `${process.env.SYSTEMROOT}\\notepad.exe` : 'vi') const shell = isWindows ? 
process.env.ComSpec || 'cmd' : process.env.SHELL || 'sh' const { networkInterfaces } = require('node:os') const getLocalAddresses = () => { try { return Object.values(networkInterfaces()).map( int => int.map(({ address }) => address) ).reduce((set, addrs) => set.concat(addrs), [null]) } catch (e) { return [null] } } const unicode = /UTF-?8$/i.test( process.env.LC_ALL || process.env.LC_CTYPE || process.env.LANG ) // use LOCALAPPDATA on Windows, if set // https://github.com/npm/cli/pull/899 const cacheRoot = (isWindows && process.env.LOCALAPPDATA) || '~' const cacheExtra = isWindows ? 'npm-cache' : '.npm' const cache = `${cacheRoot}/${cacheExtra}` // TODO: refactor these type definitions so that they are less // weird to pull out of the config module. // TODO: use better type definition/validation API, nopt's is so weird. const { semver: { type: Semver }, Umask: { type: Umask }, url: { type: url }, path: { type: path }, } = require('../type-defs.js') // basic flattening function, just copy it over camelCase const flatten = (key, obj, flatOptions) => { const camel = key.replace(/-([a-z])/g, (_0, _1) => _1.toUpperCase()) flatOptions[camel] = obj[key] } // TODO: // Instead of having each definition provide a flatten method, // provide the (?list of?) flat option field(s?) that it impacts. // When that config is set, we mark the relevant flatOption fields // dirty. Then, a getter for that field defines how we actually // set it. // // So, `save-dev`, `save-optional`, `save-prod`, et al would indicate // that they affect the `saveType` flat option. Then the config.flat // object has a `get saveType () { ... }` that looks at the "real" // config settings from files etc and returns the appropriate value. // // Getters will also (maybe?) give us a hook to audit flat option // usage, so we can document and group these more appropriately. 
// // This will be a problem with cases where we currently do: // const opts = { ...npm.flatOptions, foo: 'bar' }, but we can maybe // instead do `npm.config.set('foo', 'bar')` prior to passing the // config object down where it needs to go. // // This way, when we go hunting for "where does saveType come from anyway!?" // while fixing some Arborist bug, we won't have to hunt through too // many places. // XXX: We should really deprecate all these `--save-blah` switches // in favor of a single `--save-type` option. The unfortunate shortcut // we took for `--save-peer --save-optional` being `--save-type=peerOptional` // makes this tricky, and likely a breaking change. // Define all config keys we know about. They are indexed by their own key for // ease of lookup later. This duplication is an optimization so that we don't // have to do an extra function call just to "reuse" the key in both places. const definitions = { _auth: new Definition('_auth', { default: null, type: [null, String], description: ` A basic-auth string to use when authenticating against the npm registry. This will ONLY be used to authenticate against the npm registry. For other registries you will need to scope it like "//other-registry.tld/:_auth" Warning: This should generally not be set via a command-line option. It is safer to use a registry-provided authentication bearer token stored in the ~/.npmrc file by running \`npm login\`. `, flatten, }), access: new Definition('access', { default: null, defaultDescription: ` 'public' for new packages, existing packages it will not change the current level `, type: [null, 'restricted', 'public'], description: ` If you do not want your scoped package to be publicly viewable (and installable) set \`--access=restricted\`. Unscoped packages can not be set to \`restricted\`. Note: This defaults to not changing the current access level for existing packages. 
Specifying a value of \`restricted\` or \`public\` during publish will change the access for an existing package the same way that \`npm access set status\` would. `, flatten, }), all: new Definition('all', { default: false, type: Boolean, short: 'a', description: ` When running \`npm outdated\` and \`npm ls\`, setting \`--all\` will show all outdated or installed packages, rather than only those directly depended upon by the current project. `, flatten, }), 'allow-same-version': new Definition('allow-same-version', { default: false, type: Boolean, description: ` Prevents throwing an error when \`npm version\` is used to set the new version to the same value as the current version. `, flatten, }), also: new Definition('also', { default: null, type: [null, 'dev', 'development'], description: ` When set to \`dev\` or \`development\`, this is an alias for \`--include=dev\`. `, deprecated: 'Please use --include=dev instead.', flatten (key, obj, flatOptions) { definitions.omit.flatten('omit', obj, flatOptions) }, }), audit: new Definition('audit', { default: true, type: Boolean, description: ` When "true" submit audit reports alongside the current npm command to the default registry and all registries configured for scopes. See the documentation for [\`npm audit\`](/commands/npm-audit) for details on what is submitted. `, flatten, }), 'audit-level': new Definition('audit-level', { default: null, type: [null, 'info', 'low', 'moderate', 'high', 'critical', 'none'], description: ` The minimum level of vulnerability for \`npm audit\` to exit with a non-zero exit code. `, flatten, }), 'auth-type': new Definition('auth-type', { default: 'web', type: ['legacy', 'web'], description: ` What authentication strategy to use with \`login\`. Note that if an \`otp\` config is given, this value will always be set to \`legacy\`. 
`, flatten, }), before: new Definition('before', { default: null, type: [null, Date], description: ` If passed to \`npm install\`, will rebuild the npm tree such that only versions that were available **on or before** the \`--before\` time get installed. If there's no versions available for the current set of direct dependencies, the command will error. If the requested version is a \`dist-tag\` and the given tag does not pass the \`--before\` filter, the most recent version less than or equal to that tag will be used. For example, \`foo@latest\` might install \`foo@1.2\` even though \`latest\` is \`2.0\`. `, flatten, }), 'bin-links': new Definition('bin-links', { default: true, type: Boolean, description: ` Tells npm to create symlinks (or \`.cmd\` shims on Windows) for package executables. Set to false to have it not do this. This can be used to work around the fact that some file systems don't support symlinks, even on ostensibly Unix systems. `, flatten, }), browser: new Definition('browser', { default: null, defaultDescription: ` OS X: \`"open"\`, Windows: \`"start"\`, Others: \`"xdg-open"\` `, type: [null, Boolean, String], description: ` The browser that is called by npm commands to open websites. Set to \`false\` to suppress browser behavior and instead print urls to terminal. Set to \`true\` to use default system URL opener. `, flatten, }), ca: new Definition('ca', { default: null, type: [null, String, Array], description: ` The Certificate Authority signing certificate that is trusted for SSL connections to the registry. Values should be in PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines replaced by the string "\\n". For example: \`\`\`ini ca="-----BEGIN CERTIFICATE-----\\nXXXX\\nXXXX\\n-----END CERTIFICATE-----" \`\`\` Set to \`null\` to only allow "known" registrars, or to a specific CA cert to trust only that specific signing authority. Multiple CAs can be trusted by specifying an array of certificates: \`\`\`ini ca[]="..." 
ca[]="..." \`\`\` See also the \`strict-ssl\` config. `, flatten, }), cache: new Definition('cache', { default: cache, defaultDescription: ` Windows: \`%LocalAppData%\\npm-cache\`, Posix: \`~/.npm\` `, type: path, description: ` The location of npm's cache directory. `, flatten (key, obj, flatOptions) { flatOptions.cache = join(obj.cache, '_cacache') flatOptions.npxCache = join(obj.cache, '_npx') flatOptions.tufCache = join(obj.cache, '_tuf') }, }), 'cache-max': new Definition('cache-max', { default: Infinity, type: Number, description: ` \`--cache-max=0\` is an alias for \`--prefer-online\` `, deprecated: ` This option has been deprecated in favor of \`--prefer-online\` `, flatten (key, obj, flatOptions) { if (obj[key] <= 0) { flatOptions.preferOnline = true } }, }), 'cache-min': new Definition('cache-min', { default: 0, type: Number, description: ` \`--cache-min=9999 (or bigger)\` is an alias for \`--prefer-offline\`. `, deprecated: ` This option has been deprecated in favor of \`--prefer-offline\`. `, flatten (key, obj, flatOptions) { if (obj[key] >= 9999) { flatOptions.preferOffline = true } }, }), cafile: new Definition('cafile', { default: null, type: path, description: ` A path to a file containing one or multiple Certificate Authority signing certificates. Similar to the \`ca\` setting, but allows for multiple CA's, as well as for the CA information to be stored in a file on disk. `, flatten (key, obj, flatOptions) { // always set to null in defaults if (!obj.cafile) { return } const raw = maybeReadFile(obj.cafile) if (!raw) { return } const delim = '-----END CERTIFICATE-----' flatOptions.ca = raw.replace(/\r\n/g, '\n').split(delim) .filter(section => section.trim()) .map(section => section.trimLeft() + delim) }, }), call: new Definition('call', { default: '', type: String, short: 'c', description: ` Optional companion option for \`npm exec\`, \`npx\` that allows for specifying a custom command to be run along with the installed packages. 
\`\`\`bash npm exec --package yo --package generator-node --call "yo node" \`\`\` `, flatten, }), cert: new Definition('cert', { default: null, type: [null, String], description: ` A client certificate to pass when accessing the registry. Values should be in PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines replaced by the string "\\n". For example: \`\`\`ini cert="-----BEGIN CERTIFICATE-----\\nXXXX\\nXXXX\\n-----END CERTIFICATE-----" \`\`\` It is _not_ the path to a certificate file, though you can set a registry-scoped "certfile" path like "//other-registry.tld/:certfile=/path/to/cert.pem". `, deprecated: ` \`key\` and \`cert\` are no longer used for most registry operations. Use registry scoped \`keyfile\` and \`certfile\` instead. Example: //other-registry.tld/:keyfile=/path/to/key.pem //other-registry.tld/:certfile=/path/to/cert.crt `, flatten, }), cidr: new Definition('cidr', { default: null, type: [null, String, Array], description: ` This is a list of CIDR address to be used when configuring limited access tokens with the \`npm token create\` command. `, flatten, }), // This should never be directly used, the flattened value is the derived value // and is sent to other modules, and is also exposed as `npm.color` for use // inside npm itself. color: new Definition('color', { default: !process.env.NO_COLOR || process.env.NO_COLOR === '0', usage: '--color|--no-color|--color always', defaultDescription: ` true unless the NO_COLOR environ is set to something other than '0' `, type: ['always', Boolean], description: ` If false, never shows colors. If \`"always"\` then always shows colors. If true, then only prints color codes for tty file descriptors. `, flatten (key, obj, flatOptions) { flatOptions.color = !obj.color ? false : obj.color === 'always' ? true : !!process.stdout.isTTY flatOptions.logColor = !obj.color ? false : obj.color === 'always' ? 
true : !!process.stderr.isTTY }, }), 'commit-hooks': new Definition('commit-hooks', { default: true, type: Boolean, description: ` Run git commit hooks when using the \`npm version\` command. `, flatten, }), cpu: new Definition('cpu', { default: null, type: [null, String], description: ` Override CPU architecture of native modules to install. Acceptable values are same as \`cpu\` field of package.json, which comes from \`process.arch\`. `, flatten, }), depth: new Definition('depth', { default: null, defaultDescription: ` \`Infinity\` if \`--all\` is set, otherwise \`1\` `, type: [null, Number], description: ` The depth to go when recursing packages for \`npm ls\`. If not set, \`npm ls\` will show only the immediate dependencies of the root project. If \`--all\` is set, then npm will show all dependencies by default. `, flatten, }), description: new Definition('description', { default: true, type: Boolean, usage: '--no-description', description: ` Show the description in \`npm search\` `, flatten (key, obj, flatOptions) { flatOptions.search = flatOptions.search || { limit: 20 } flatOptions.search[key] = obj[key] }, }), dev: new Definition('dev', { default: false, type: Boolean, description: ` Alias for \`--include=dev\`. `, deprecated: 'Please use --include=dev instead.', flatten (key, obj, flatOptions) { definitions.omit.flatten('omit', obj, flatOptions) }, }), diff: new Definition('diff', { default: [], hint: '<package-spec>', type: [String, Array], description: ` Define arguments to compare in \`npm diff\`. `, flatten, }), 'diff-ignore-all-space': new Definition('diff-ignore-all-space', { default: false, type: Boolean, description: ` Ignore whitespace when comparing lines in \`npm diff\`. `, flatten, }), 'diff-name-only': new Definition('diff-name-only', { default: false, type: Boolean, description: ` Prints only filenames when using \`npm diff\`. 
`, flatten, }), 'diff-no-prefix': new Definition('diff-no-prefix', { default: false, type: Boolean, description: ` Do not show any source or destination prefix in \`npm diff\` output. Note: this causes \`npm diff\` to ignore the \`--diff-src-prefix\` and \`--diff-dst-prefix\` configs. `, flatten, }), 'diff-dst-prefix': new Definition('diff-dst-prefix', { default: 'b/', hint: '<path>', type: String, description: ` Destination prefix to be used in \`npm diff\` output. `, flatten, }), 'diff-src-prefix': new Definition('diff-src-prefix', { default: 'a/', hint: '<path>', type: String, description: ` Source prefix to be used in \`npm diff\` output. `, flatten, }), 'diff-text': new Definition('diff-text', { default: false, type: Boolean, description: ` Treat all files as text in \`npm diff\`. `, flatten, }), 'diff-unified': new Definition('diff-unified', { default: 3, type: Number, description: ` The number of lines of context to print in \`npm diff\`. `, flatten, }), 'dry-run': new Definition('dry-run', { default: false, type: Boolean, description: ` Indicates that you don't want npm to make any changes and that it should only report what it would have done. This can be passed into any of the commands that modify your local installation, eg, \`install\`, \`update\`, \`dedupe\`, \`uninstall\`, as well as \`pack\` and \`publish\`. Note: This is NOT honored by other network related commands, eg \`dist-tags\`, \`owner\`, etc. `, flatten, }), editor: new Definition('editor', { default: editor, defaultDescription: ` The EDITOR or VISUAL environment variables, or '%SYSTEMROOT%\\notepad.exe' on Windows, or 'vi' on Unix systems `, type: String, description: ` The command to run for \`npm edit\` and \`npm config edit\`. 
`, flatten, }), 'engine-strict': new Definition('engine-strict', { default: false, type: Boolean, description: ` If set to true, then npm will stubbornly refuse to install (or even consider installing) any package that claims to not be compatible with the current Node.js version. This can be overridden by setting the \`--force\` flag. `, flatten, }), 'expect-result-count': new Definition('expect-result-count', { default: null, type: [null, Number], hint: '<count>', exclusive: ['expect-results'], description: ` Tells to expect a specific number of results from the command. `, }), 'expect-results': new Definition('expect-results', { default: null, type: [null, Boolean], exclusive: ['expect-result-count'], description: ` Tells npm whether or not to expect results from the command. Can be either true (expect some results) or false (expect no results). `, }), 'fetch-retries': new Definition('fetch-retries', { default: 2, type: Number, description: ` The "retries" config for the \`retry\` module to use when fetching packages from the registry. npm will retry idempotent read requests to the registry in the case of network failures or 5xx HTTP errors. `, flatten (key, obj, flatOptions) { flatOptions.retry = flatOptions.retry || {} flatOptions.retry.retries = obj[key] }, }), 'fetch-retry-factor': new Definition('fetch-retry-factor', { default: 10, type: Number, description: ` The "factor" config for the \`retry\` module to use when fetching packages. `, flatten (key, obj, flatOptions) { flatOptions.retry = flatOptions.retry || {} flatOptions.retry.factor = obj[key] }, }), 'fetch-retry-maxtimeout': new Definition('fetch-retry-maxtimeout', { default: 60000, defaultDescription: '60000 (1 minute)', type: Number, description: ` The "maxTimeout" config for the \`retry\` module to use when fetching packages. 
`, flatten (key, obj, flatOptions) { flatOptions.retry = flatOptions.retry || {} flatOptions.retry.maxTimeout = obj[key] }, }), 'fetch-retry-mintimeout': new Definition('fetch-retry-mintimeout', { default: 10000, defaultDescription: '10000 (10 seconds)', type: Number, description: ` The "minTimeout" config for the \`retry\` module to use when fetching packages. `, flatten (key, obj, flatOptions) { flatOptions.retry = flatOptions.retry || {} flatOptions.retry.minTimeout = obj[key] }, }), 'fetch-timeout': new Definition('fetch-timeout', { default: 5 * 60 * 1000, defaultDescription: `${5 * 60 * 1000} (5 minutes)`, type: Number, description: ` The maximum amount of time to wait for HTTP requests to complete. `, flatten (key, obj, flatOptions) { flatOptions.timeout = obj[key] }, }), force: new Definition('force', { default: false, type: Boolean, short: 'f', description: ` Removes various protections against unfortunate side effects, common mistakes, unnecessary performance degradation, and malicious input. * Allow clobbering non-npm files in global installs. * Allow the \`npm version\` command to work on an unclean git repository. * Allow deleting the cache folder with \`npm cache clean\`. * Allow installing packages that have an \`engines\` declaration requiring a different version of npm. * Allow installing packages that have an \`engines\` declaration requiring a different version of \`node\`, even if \`--engine-strict\` is enabled. * Allow \`npm audit fix\` to install modules outside your stated dependency range (including SemVer-major changes). * Allow unpublishing all versions of a published package. * Allow conflicting peerDependencies to be installed in the root project. * Implicitly set \`--yes\` during \`npm init\`. * Allow clobbering existing values in \`npm pkg\` * Allow unpublishing of entire packages (not just a single version). If you don't have a clear idea of what you want to do, it is strongly recommended that you do not use this option! 
`, flatten, }), 'foreground-scripts': new Definition('foreground-scripts', { default: false, defaultDescription: `\`false\` unless when using \`npm pack\` or \`npm publish\` where it defaults to \`true\``, type: Boolean, description: ` Run all build scripts (ie, \`preinstall\`, \`install\`, and \`postinstall\`) scripts for installed packages in the foreground process, sharing standard input, output, and error with the main npm process. Note that this will generally make installs run slower, and be much noisier, but can be useful for debugging. `, flatten, }), 'format-package-lock': new Definition('format-package-lock', { default: true, type: Boolean, description: ` Format \`package-lock.json\` or \`npm-shrinkwrap.json\` as a human readable file. `, flatten, }), fund: new Definition('fund', { default: true, type: Boolean, description: ` When "true" displays the message at the end of each \`npm install\` acknowledging the number of dependencies looking for funding. See [\`npm fund\`](/commands/npm-fund) for details. `, flatten, }), git: new Definition('git', { default: 'git', type: String, description: ` The command to use for git commands. If git is installed on the computer, but is not in the \`PATH\`, then set this to the full path to the git binary. `, flatten, }), 'git-tag-version': new Definition('git-tag-version', { default: true, type: Boolean, description: ` Tag the commit when using the \`npm version\` command. Setting this to false results in no commit being made at all. `, flatten, }), global: new Definition('global', { default: false, type: Boolean, short: 'g', description: ` Operates in "global" mode, so that packages are installed into the \`prefix\` folder instead of the current working directory. See [folders](/configuring-npm/folders) for more on the differences in behavior. * packages are installed into the \`{prefix}/lib/node_modules\` folder, instead of the current working directory. 
* bin files are linked to \`{prefix}/bin\` * man pages are linked to \`{prefix}/share/man\` `, flatten: (key, obj, flatOptions) => { flatten(key, obj, flatOptions) if (flatOptions.global) { flatOptions.location = 'global' } }, }), // the globalconfig has its default defined outside of this module globalconfig: new Definition('globalconfig', { type: path, default: '', defaultDescription: ` The global --prefix setting plus 'etc/npmrc'. For example, '/usr/local/etc/npmrc' `, description: ` The config file to read for global config options. `, flatten, }), 'global-style': new Definition('global-style', { default: false, type: Boolean, description: ` Only install direct dependencies in the top level \`node_modules\`, but hoist on deeper dependencies. Sets \`--install-strategy=shallow\`. `, deprecated: ` This option has been deprecated in favor of \`--install-strategy=shallow\` `, flatten (key, obj, flatOptions) { if (obj[key]) { obj['install-strategy'] = 'shallow' flatOptions.installStrategy = 'shallow' } }, }), heading: new Definition('heading', { default: 'npm', type: String, description: ` The string that starts all the debugging log output. `, flatten, }), 'https-proxy': new Definition('https-proxy', { default: null, type: [null, url], description: ` A proxy to use for outgoing https requests. If the \`HTTPS_PROXY\` or \`https_proxy\` or \`HTTP_PROXY\` or \`http_proxy\` environment variables are set, proxy settings will be honored by the underlying \`make-fetch-happen\` library. `, flatten, }), 'if-present': new Definition('if-present', { default: false, type: Boolean, envExport: false, description: ` If true, npm will not exit with an error code when \`run-script\` is invoked for a script that isn't defined in the \`scripts\` section of \`package.json\`. This option can be used when it's desirable to optionally run a script when it's present and fail if the script fails. 
This is useful, for example, when running scripts that may only apply for some builds in an otherwise generic CI setup. `, flatten, }), 'ignore-scripts': new Definition('ignore-scripts', { default: false, type: Boolean, description: ` If true, npm does not run scripts specified in package.json files. Note that commands explicitly intended to run a particular script, such as \`npm start\`, \`npm stop\`, \`npm restart\`, \`npm test\`, and \`npm run-script\` will still run their intended script if \`ignore-scripts\` is set, but they will *not* run any pre- or post-scripts. `, flatten, }), include: new Definition('include', { default: [], type: [Array, 'prod', 'dev', 'optional', 'peer'], description: ` Option that allows for defining which types of dependencies to install. This is the inverse of \`--omit=<type>\`. Dependency types specified in \`--include\` will not be omitted, regardless of the order in which omit/include are specified on the command-line. `, flatten (key, obj, flatOptions) { // just call the omit flattener, it reads from obj.include definitions.omit.flatten('omit', obj, flatOptions) }, }), 'include-staged': new Definition('include-staged', { default: false, type: Boolean, description: ` Allow installing "staged" published packages, as defined by [npm RFC PR #92](https://github.com/npm/rfcs/pull/92). This is experimental, and not implemented by the npm public registry. `, flatten, }), 'include-workspace-root': new Definition('include-workspace-root', { default: false, type: Boolean, envExport: false, description: ` Include the workspace root when workspaces are enabled for a command. When false, specifying individual workspaces via the \`workspace\` config, or all workspaces via the \`workspaces\` flag, will cause npm to operate only on the specified workspaces, and not on the root project. 
`, flatten, }), 'init-author-email': new Definition('init-author-email', { default: '', hint: '<email>', type: String, description: ` The value \`npm init\` should use by default for the package author's email. `, }), 'init-author-name': new Definition('init-author-name', { default: '', hint: '<name>', type: String, description: ` The value \`npm init\` should use by default for the package author's name. `, }), 'init-author-url': new Definition('init-author-url', { default: '', type: ['', url], hint: '<url>', description: ` The value \`npm init\` should use by default for the package author's homepage. `, }), 'init-license': new Definition('init-license', { default: 'ISC', hint: '<license>', type: String, description: ` The value \`npm init\` should use by default for the package license. `, }), 'init-module': new Definition('init-module', { default: '~/.npm-init.js', type: path, hint: '<module>', description: ` A module that will be loaded by the \`npm init\` command. See the documentation for the [init-package-json](https://github.com/npm/init-package-json) module for more information, or [npm init](/commands/npm-init). `, }), 'init-version': new Definition('init-version', { default: '1.0.0', type: Semver, hint: '<version>', description: ` The value that \`npm init\` should use by default for the package version number, if not already set in package.json. `, }), // these "aliases" are historically supported in .npmrc files, unfortunately // They should be removed in a future npm version. 'init.author.email': new Definition('init.author.email', { default: '', type: String, deprecated: ` Use \`--init-author-email\` instead.`, description: ` Alias for \`--init-author-email\` `, }), 'init.author.name': new Definition('init.author.name', { default: '', type: String, deprecated: ` Use \`--init-author-name\` instead. 
`, description: ` Alias for \`--init-author-name\` `, }), 'init.author.url': new Definition('init.author.url', { default: '', type: ['', url], deprecated: ` Use \`--init-author-url\` instead. `, description: ` Alias for \`--init-author-url\` `, }), 'init.license': new Definition('init.license', { default: 'ISC', type: String, deprecated: ` Use \`--init-license\` instead. `, description: ` Alias for \`--init-license\` `, }), 'init.module': new Definition('init.module', { default: '~/.npm-init.js', type: path, deprecated: ` Use \`--init-module\` instead. `, description: ` Alias for \`--init-module\` `, }), 'init.version': new Definition('init.version', { default: '1.0.0', type: Semver, deprecated: ` Use \`--init-version\` instead. `, description: ` Alias for \`--init-version\` `, }), 'install-links': new Definition('install-links', { default: false, type: Boolean, description: ` When set file: protocol dependencies will be packed and installed as regular dependencies instead of creating a symlink. This option has no effect on workspaces. `, flatten, }), 'install-strategy': new Definition('install-strategy', { default: 'hoisted', type: ['hoisted', 'nested', 'shallow', 'linked'], description: ` Sets the strategy for installing packages in node_modules. hoisted (default): Install non-duplicated in top-level, and duplicated as necessary within directory structure. nested: (formerly --legacy-bundling) install in place, no hoisting. shallow (formerly --global-style) only install direct deps at top-level. linked: (experimental) install in node_modules/.store, link in place, unhoisted. `, flatten, }), json: new Definition('json', { default: false, type: Boolean, description: ` Whether or not to output JSON data, rather than the normal output. * In \`npm pkg set\` it enables parsing set values with JSON.parse() before saving them to your \`package.json\`. Not supported by all npm commands. 
`, flatten, }), key: new Definition('key', { default: null, type: [null, String], description: ` A client key to pass when accessing the registry. Values should be in PEM format with newlines replaced by the string "\\n". For example: \`\`\`ini key="-----BEGIN PRIVATE KEY-----\\nXXXX\\nXXXX\\n-----END PRIVATE KEY-----" \`\`\` It is _not_ the path to a key file, though you can set a registry-scoped "keyfile" path like "//other-registry.tld/:keyfile=/path/to/key.pem". `, deprecated: ` \`key\` and \`cert\` are no longer used for most registry operations. Use registry scoped \`keyfile\` and \`certfile\` instead. Example: //other-registry.tld/:keyfile=/path/to/key.pem //other-registry.tld/:certfile=/path/to/cert.crt `, flatten, }), 'legacy-bundling': new Definition('legacy-bundling', { default: false, type: Boolean, description: ` Instead of hoisting package installs in \`node_modules\`, install packages in the same manner that they are depended on. This may cause very deep directory structures and duplicate package installs as there is no de-duplicating. Sets \`--install-strategy=nested\`. `, deprecated: ` This option has been deprecated in favor of \`--install-strategy=nested\` `, flatten (key, obj, flatOptions) { if (obj[key]) { obj['install-strategy'] = 'nested' flatOptions.installStrategy = 'nested' } }, }), 'legacy-peer-deps': new Definition('legacy-peer-deps', { default: false, type: Boolean, description: ` Causes npm to completely ignore \`peerDependencies\` when building a package tree, as in npm versions 3 through 6. If a package cannot be installed because of overly strict \`peerDependencies\` that collide, it provides a way to move forward resolving the situation. This differs from \`--omit=peer\`, in that \`--omit=peer\` will avoid unpacking \`peerDependencies\` on disk, but will still design a tree such that \`peerDependencies\` _could_ be unpacked in a correct place. 
Use of \`legacy-peer-deps\` is not recommended, as it will not enforce the \`peerDependencies\` contract that meta-dependencies may rely on. `, flatten, }), libc: new Definition('libc', { default: null, type: [null, String], description: ` Override libc of native modules to install. Acceptable values are same as \`libc\` field of package.json `, flatten, }), link: new Definition('link', { default: false, type: Boolean, description: ` Used with \`npm ls\`, limiting output to only those packages that are linked. `, }), 'local-address': new Definition('local-address', { default: null, type: getLocalAddresses(), typeDescription: 'IP Address', description: ` The IP address of the local interface to use when making connections to the npm registry. Must be IPv4 in versions of Node prior to 0.12. `, flatten, }), location: new Definition('location', { default: 'user', short: 'L', type: [ 'global', 'user', 'project', ], defaultDescription: ` "user" unless \`--global\` is passed, which will also set this value to "global" `, description: ` When passed to \`npm config\` this refers to which config file to use. When set to "global" mode, packages are installed into the \`prefix\` folder instead of the current working directory. See [folders](/configuring-npm/folders) for more on the differences in behavior. * packages are installed into the \`{prefix}/lib/node_modules\` folder, instead of the current working directory. 
* bin files are linked to \`{prefix}/bin\` * man pages are linked to \`{prefix}/share/man\` `, flatten: (key, obj, flatOptions) => { flatten(key, obj, flatOptions) if (flatOptions.global) { flatOptions.location = 'global' } if (obj.location === 'global') { flatOptions.global = true } }, }), 'lockfile-version': new Definition('lockfile-version', { default: null, type: [null, 1, 2, 3, '1', '2', '3'], defaultDescription: ` Version 3 if no lockfile, auto-converting v1 lockfiles to v3, otherwise maintain current lockfile version.`, description: ` Set the lockfile format version to be used in package-lock.json and npm-shrinkwrap-json files. Possible options are: 1: The lockfile version used by npm versions 5 and 6. Lacks some data that is used during the install, resulting in slower and possibly less deterministic installs. Prevents lockfile churn when interoperating with older npm versions. 2: The default lockfile version used by npm version 7 and 8. Includes both the version 1 lockfile data and version 3 lockfile data, for maximum determinism and interoperability, at the expense of more bytes on disk. 3: Only the new lockfile information introduced in npm version 7. Smaller on disk than lockfile version 2, but not interoperable with older npm versions. Ideal if all users are on npm version 7 and higher. `, flatten: (key, obj, flatOptions) => { flatOptions.lockfileVersion = obj[key] && parseInt(obj[key], 10) }, }), loglevel: new Definition('loglevel', { default: 'notice', type: [ 'silent', 'error', 'warn', 'notice', 'http', 'info', 'verbose', 'silly', ], description: ` What level of logs to report. All logs are written to a debug log, with the path to that file printed if the execution of a command fails. Any logs of a higher level than the setting are shown. The default is "notice". See also the \`foreground-scripts\` config. 
`, flatten (key, obj, flatOptions) { flatOptions.silent = obj[key] === 'silent' }, }), 'logs-dir': new Definition('logs-dir', { default: null, type: [null, path], defaultDescription: ` A directory named \`_logs\` inside the cache `, description: ` The location of npm's log directory. See [\`npm logging\`](/using-npm/logging) for more information. `, }), 'logs-max': new Definition('logs-max', { default: 10, type: Number, description: ` The maximum number of log files to store. If set to 0, no log files will be written for the current run. `, }), long: new Definition('long', { default: false, type: Boolean, short: 'l', description: ` Show extended information in \`ls\`, \`search\`, and \`help-search\`. `, }), maxsockets: new Definition('maxsockets', { default: 15, type: Number, description: ` The maximum number of connections to use per origin (protocol/host/port combination). `, flatten (key, obj, flatOptions) { flatOptions.maxSockets = obj[key] }, }), message: new Definition('message', { default: '%s', type: String, short: 'm', description: ` Commit message which is used by \`npm version\` when creating version commit. Any "%s" in the message will be replaced with the version number. `, flatten, }), 'node-options': new Definition('node-options', { default: null, type: [null, String], description: ` Options to pass through to Node.js via the \`NODE_OPTIONS\` environment variable. This does not impact how npm itself is executed but it does impact how lifecycle scripts are called. `, }), noproxy: new Definition('noproxy', { default: '', defaultDescription: ` The value of the NO_PROXY environment variable `, type: [String, Array], description: ` Domain extensions that should bypass any proxies. Also accepts a comma-delimited string. 
`, flatten (key, obj, flatOptions) { if (Array.isArray(obj[key])) { flatOptions.noProxy = obj[key].join(',') } else { flatOptions.noProxy = obj[key] } }, }), offline: new Definition('offline', { default: false, type: Boolean, description: ` Force offline mode: no network requests will be done during install. To allow the CLI to fill in missing cache data, see \`--prefer-offline\`. `, flatten, }), omit: new Definition('omit', { default: process.env.NODE_ENV === 'production' ? ['dev'] : [], defaultDescription: ` 'dev' if the \`NODE_ENV\` environment variable is set to 'production', otherwise empty. `, type: [Array, 'dev', 'optional', 'peer'], description: ` Dependency types to omit from the installation tree on disk. Note that these dependencies _are_ still resolved and added to the \`package-lock.json\` or \`npm-shrinkwrap.json\` file. They are just not physically installed on disk. If a package type appears in both the \`--include\` and \`--omit\` lists, then it will be included. If the resulting omit list includes \`'dev'\`, then the \`NODE_ENV\` environment variable will be set to \`'production'\` for all lifecycle scripts. `, flatten (key, obj, flatOptions) { flatOptions.omit = buildOmitList(obj) }, }), 'omit-lockfile-registry-resolved': new Definition('omit-lockfile-registry-resolved', { default: false, type: Boolean, description: ` This option causes npm to create lock files without a \`resolved\` key for registry dependencies. Subsequent installs will need to resolve tarball endpoints with the configured registry, likely resulting in a longer install time. `, flatten, }), only: new Definition('only', { default: null, type: [null, 'prod', 'production'], deprecated: ` Use \`--omit=dev\` to omit dev dependencies from the install. `, description: ` When set to \`prod\` or \`production\`, this is an alias for \`--omit=dev\`. 
`, flatten (key, obj, flatOptions) { definitions.omit.flatten('omit', obj, flatOptions) }, }), optional: new Definition('optional', { default: null, type: [null, Boolean], deprecated: ` Use \`--omit=optional\` to exclude optional dependencies, or \`--include=optional\` to include them. Default value does install optional deps unless otherwise omitted. `, description: ` Alias for --include=optional or --omit=optional `, flatten (key, obj, flatOptions) { definitions.omit.flatten('omit', obj, flatOptions) }, }), os: new Definition('os', { default: null, type: [null, String], description: ` Override OS of native modules to install. Acceptable values are same as \`os\` field of package.json, which comes from \`process.platform\`. `, flatten, }), otp: new Definition('otp', { default: null, type: [null, String], description: ` This is a one-time password from a two-factor authenticator. It's needed when publishing or changing package permissions with \`npm access\`. If not set, and a registry response fails with a challenge for a one-time password, npm will prompt on the command line for one. `, flatten (key, obj, flatOptions) { flatten(key, obj, flatOptions) if (obj.otp) { obj['auth-type'] = 'legacy' flatten('auth-type', obj, flatOptions) } }, }), package: new Definition('package', { default: [], hint: '<package-spec>', type: [String, Array], description: ` The package or packages to install for [\`npm exec\`](/commands/npm-exec) `, flatten, }), 'package-lock': new Definition('package-lock', { default: true, type: Boolean, description: ` If set to false, then ignore \`package-lock.json\` files when installing. This will also prevent _writing_ \`package-lock.json\` if \`save\` is true. 
`, flatten: (key, obj, flatOptions) => { flatten(key, obj, flatOptions) if (flatOptions.packageLockOnly) { flatOptions.packageLock = true } }, }), 'package-lock-only': new Definition('package-lock-only', { default: false, type: Boolean, description: ` If set to true, the current operation will only use the \`package-lock.json\`, ignoring \`node_modules\`. For \`update\` this means only the \`package-lock.json\` will be updated, instead of checking \`node_modules\` and downloading dependencies. For \`list\` this means the output will be based on the tree described by the \`package-lock.json\`, rather than the contents of \`node_modules\`. `, flatten: (key, obj, flatOptions) => { flatten(key, obj, flatOptions) if (flatOptions.packageLockOnly) { flatOptions.packageLock = true } }, }), 'pack-destination': new Definition('pack-destination', { default: '.', type: String, description: ` Directory in which \`npm pack\` will save tarballs. `, flatten, }), parseable: new Definition('parseable', { default: false, type: Boolean, short: 'p', description: ` Output parseable results from commands that write to standard output. For \`npm search\`, this will be tab-separated table format. `, flatten, }), 'prefer-dedupe': new Definition('prefer-dedupe', { default: false, type: Boolean, description: ` Prefer to deduplicate packages if possible, rather than choosing a newer version of a dependency. `, flatten, }), 'prefer-offline': new Definition('prefer-offline', { default: false, type: Boolean, description: ` If true, staleness checks for cached data will be bypassed, but missing data will be requested from the server. To force full offline mode, use \`--offline\`. `, flatten, }), 'prefer-online': new Definition('prefer-online', { default: false, type: Boolean, description: ` If true, staleness checks for cached data will be forced, making the CLI look for updates immediately even for fresh package data. 
`, flatten, }), // `prefix` has its default defined outside of this module prefix: new Definition('prefix', { type: path, short: 'C', default: '', defaultDescription: ` In global mode, the folder where the node executable is installed. Otherwise, the nearest parent folder containing either a package.json file or a node_modules folder. `, description: ` The location to install global items. If set on the command line, then it forces non-global commands to run in the specified folder. `, }), preid: new Definition('preid', { default: '', hint: 'prerelease-id', type: String, description: ` The "prerelease identifier" to use as a prefix for the "prerelease" part of a semver. Like the \`rc\` in \`1.2.0-rc.8\`. `, flatten, }), production: new Definition('production', { default: null, type: [null, Boolean], deprecated: 'Use `--omit=dev` instead.', description: 'Alias for `--omit=dev`', flatten (key, obj, flatOptions) { definitions.omit.flatten('omit', obj, flatOptions) }, }), progress: new Definition('progress', { default: !ciInfo.isCI, defaultDescription: ` \`true\` unless running in a known CI system `, type: Boolean, description: ` When set to \`true\`, npm will display a progress bar during time intensive operations, if \`process.stderr\` and \`process.stdout\` are a TTY. Set to \`false\` to suppress the progress bar. `, flatten (key, obj, flatOptions) { flatOptions.progress = !obj.progress ? false // progress is only written to stderr but we disable it unless stdout is a tty // also. This prevents the progress from appearing when piping output to another // command which doesn't break anything, but does look very odd to users. : !!process.stderr.isTTY && !!process.stdout.isTTY && process.env.TERM !== 'dumb' }, }), provenance: new Definition('provenance', { default: false, type: Boolean, exclusive: ['provenance-file'], description: ` When publishing from a supported cloud CI/CD system, the package will be publicly linked to where it was built and published from. 
`, flatten, }), 'provenance-file': new Definition('provenance-file', { default: null, type: path, hint: '<file>', exclusive: ['provenance'], description: ` When publishing, the provenance bundle at the given path will be used. `, flatten, }), proxy: new Definition('proxy', { default: null, type: [null, false, url], // allow proxy to be disabled explicitly description: ` A proxy to use for outgoing http requests. If the \`HTTP_PROXY\` or \`http_proxy\` environment variables are set, proxy settings will be honored by the underlying \`request\` library. `, flatten, }), 'read-only': new Definition('read-only', { default: false, type: Boolean, description: ` This is used to mark a token as unable to publish when configuring limited access tokens with the \`npm token create\` command. `, flatten, }), 'rebuild-bundle': new Definition('rebuild-bundle', { default: true, type: Boolean, description: ` Rebuild bundled dependencies after installation. `, flatten, }), registry: new Definition('registry', { default: 'https://registry.npmjs.org/', type: url, description: ` The base URL of the npm registry. `, flatten, }), 'replace-registry-host': new Definition('replace-registry-host', { default: 'npmjs', hint: '<npmjs|never|always> | hostname', type: ['npmjs', 'never', 'always', String], description: ` Defines behavior for replacing the registry host in a lockfile with the configured registry. The default behavior is to replace package dist URLs from the default registry (https://registry.npmjs.org) to the configured registry. If set to "never", then use the registry value. If set to "always", then replace the registry host with the configured host every time. You may also specify a bare hostname (e.g., "registry.npmjs.org"). 
`, flatten, }), save: new Definition('save', { default: true, defaultDescription: `\`true\` unless when using \`npm update\` where it defaults to \`false\``, usage: '-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer|--save-bundle', type: Boolean, short: 'S', description: ` Save installed packages to a \`package.json\` file as dependencies. When used with the \`npm rm\` command, removes the dependency from \`package.json\`. Will also prevent writing to \`package-lock.json\` if set to \`false\`. `, flatten, }), 'save-bundle': new Definition('save-bundle', { default: false, type: Boolean, short: 'B', description: ` If a package would be saved at install time by the use of \`--save\`, \`--save-dev\`, or \`--save-optional\`, then also put it in the \`bundleDependencies\` list. Ignored if \`--save-peer\` is set, since peerDependencies cannot be bundled. `, flatten (key, obj, flatOptions) { // XXX update arborist to just ignore it if resulting saveType is peer // otherwise this won't have the expected effect: // // npm config set save-peer true // npm i foo --save-bundle --save-prod <-- should bundle flatOptions.saveBundle = obj['save-bundle'] && !obj['save-peer'] }, }), 'save-dev': new Definition('save-dev', { default: false, type: Boolean, short: 'D', description: ` Save installed packages to a package.json file as \`devDependencies\`. `, flatten (key, obj, flatOptions) { if (!obj[key]) { if (flatOptions.saveType === 'dev') { delete flatOptions.saveType } return } flatOptions.saveType = 'dev' }, }), 'save-exact': new Definition('save-exact', { default: false, type: Boolean, short: 'E', description: ` Dependencies saved to package.json will be configured with an exact version rather than using npm's default semver range operator. 
`, flatten (key, obj, flatOptions) { // just call the save-prefix flattener, it reads from obj['save-exact'] definitions['save-prefix'].flatten('save-prefix', obj, flatOptions) }, }), 'save-optional': new Definition('save-optional', { default: false, type: Boolean, short: 'O', description: ` Save installed packages to a package.json file as \`optionalDependencies\`. `, flatten (key, obj, flatOptions) { if (!obj[key]) { if (flatOptions.saveType === 'optional') { delete flatOptions.saveType } else if (flatOptions.saveType === 'peerOptional') { flatOptions.saveType = 'peer' } return } if (flatOptions.saveType === 'peerOptional') { return } if (flatOptions.saveType === 'peer') { flatOptions.saveType = 'peerOptional' } else { flatOptions.saveType = 'optional' } }, }), 'save-peer': new Definition('save-peer', { default: false, type: Boolean, description: ` Save installed packages to a package.json file as \`peerDependencies\` `, flatten (key, obj, flatOptions) { if (!obj[key]) { if (flatOptions.saveType === 'peer') { delete flatOptions.saveType } else if (flatOptions.saveType === 'peerOptional') { flatOptions.saveType = 'optional' } return } if (flatOptions.saveType === 'peerOptional') { return } if (flatOptions.saveType === 'optional') { flatOptions.saveType = 'peerOptional' } else { flatOptions.saveType = 'peer' } }, }), 'save-prefix': new Definition('save-prefix', { default: '^', type: String, description: ` Configure how versions of packages installed to a package.json file via \`--save\` or \`--save-dev\` get prefixed. For example if a package has version \`1.2.3\`, by default its version is set to \`^1.2.3\` which allows minor upgrades for that package, but after \`npm config set save-prefix='~'\` it would be set to \`~1.2.3\` which only allows patch upgrades. `, flatten (key, obj, flatOptions) { flatOptions.savePrefix = obj['save-exact'] ? 
'' : obj['save-prefix'] obj['save-prefix'] = flatOptions.savePrefix }, }), 'save-prod': new Definition('save-prod', { default: false, type: Boolean, short: 'P', description: ` Save installed packages into \`dependencies\` specifically. This is useful if a package already exists in \`devDependencies\` or \`optionalDependencies\`, but you want to move it to be a non-optional production dependency. This is the default behavior if \`--save\` is true, and neither \`--save-dev\` or \`--save-optional\` are true. `, flatten (key, obj, flatOptions) { if (!obj[key]) { if (flatOptions.saveType === 'prod') { delete flatOptions.saveType } return } flatOptions.saveType = 'prod' }, }), 'sbom-format': new Definition('sbom-format', { default: null, type: [ 'cyclonedx', 'spdx', ], description: ` SBOM format to use when generating SBOMs. `, flatten, }), 'sbom-type': new Definition('sbom-type', { default: 'library', type: [ 'library', 'application', 'framework', ], description: ` The type of package described by the generated SBOM. For SPDX, this is the value for the \`primaryPackagePurpose\` field. For CycloneDX, this is the value for the \`type\` field. `, flatten, }), scope: new Definition('scope', { default: '', defaultDescription: ` the scope of the current project, if any, or "" `, type: String, hint: '<@scope>', description: ` Associate an operation with a scope for a scoped registry. Useful when logging in to or out of a private registry: \`\`\` # log in, linking the scope to the custom registry npm login --scope=@mycorp --registry=https://registry.mycorp.com # log out, removing the link and the auth token npm logout --scope=@mycorp \`\`\` This will cause \`@mycorp\` to be mapped to the registry for future installation of packages specified according to the pattern \`@mycorp/package\`. This will also cause \`npm init\` to create a scoped package. 
\`\`\` # accept all defaults, and create a package named "@foo/whatever", # instead of just named "whatever" npm init --scope=@foo --yes \`\`\` `, flatten (key, obj, flatOptions) { const value = obj[key] const scope = value && !/^@/.test(value) ? `@${value}` : value flatOptions.scope = scope // projectScope is kept for compatibility with npm-registry-fetch flatOptions.projectScope = scope }, }), 'script-shell': new Definition('script-shell', { default: null, defaultDescription: ` '/bin/sh' on POSIX systems, 'cmd.exe' on Windows `, type: [null, String], description: ` The shell to use for scripts run with the \`npm exec\`, \`npm run\` and \`npm init <package-spec>\` commands. `, flatten (key, obj, flatOptions) { flatOptions.scriptShell = obj[key] || undefined }, }), searchexclude: new Definition('searchexclude', { default: '', type: String, description: ` Space-separated options that limit the results from search. `, flatten (key, obj, flatOptions) { flatOptions.search = flatOptions.search || { limit: 20 } flatOptions.search.exclude = obj[key].toLowerCase() }, }), searchlimit: new Definition('searchlimit', { default: 20, type: Number, description: ` Number of items to limit search results to. Will not apply at all to legacy searches. `, flatten (key, obj, flatOptions) { flatOptions.search = flatOptions.search || {} flatOptions.search.limit = obj[key] }, }), searchopts: new Definition('searchopts', { default: '', type: String, description: ` Space-separated options that are always passed to search. `, flatten (key, obj, flatOptions) { flatOptions.search = flatOptions.search || { limit: 20 } flatOptions.search.opts = querystring.parse(obj[key]) }, }), searchstaleness: new Definition('searchstaleness', { default: 15 * 60, type: Number, description: ` The age of the cache, in seconds, before another registry request is made if using legacy search endpoint. 
`, flatten (key, obj, flatOptions) { flatOptions.search = flatOptions.search || { limit: 20 } flatOptions.search.staleness = obj[key] }, }), shell: new Definition('shell', { default: shell, defaultDescription: ` SHELL environment variable, or "bash" on Posix, or "cmd.exe" on Windows `, type: String, description: ` The shell to run for the \`npm explore\` command. `, flatten, }), shrinkwrap: new Definition('shrinkwrap', { default: true, type: Boolean, deprecated: ` Use the --package-lock setting instead. `, description: ` Alias for --package-lock `, flatten (key, obj, flatOptions) { obj['package-lock'] = obj.shrinkwrap definitions['package-lock'].flatten('package-lock', obj, flatOptions) }, }), 'sign-git-commit': new Definition('sign-git-commit', { default: false, type: Boolean, description: ` If set to true, then the \`npm version\` command will commit the new package version using \`-S\` to add a signature. Note that git requires you to have set up GPG keys in your git configs for this to work properly. `, flatten, }), 'sign-git-tag': new Definition('sign-git-tag', { default: false, type: Boolean, description: ` If set to true, then the \`npm version\` command will tag the version using \`-s\` to add a signature. Note that git requires you to have set up GPG keys in your git configs for this to work properly. `, flatten, }), 'strict-peer-deps': new Definition('strict-peer-deps', { default: false, type: Boolean, description: ` If set to \`true\`, and \`--legacy-peer-deps\` is not set, then _any_ conflicting \`peerDependencies\` will be treated as an install failure, even if npm could reasonably guess the appropriate resolution based on non-peer dependency relationships. By default, conflicting \`peerDependencies\` deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \`peerDependencies\` object. 
When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \`--strict-peer-deps\` is set, then this warning is treated as a failure. `, flatten, }), 'strict-ssl': new Definition('strict-ssl', { default: true, type: Boolean, description: ` Whether or not to do SSL key validation when making requests to the registry via https. See also the \`ca\` config. `, flatten (key, obj, flatOptions) { flatOptions.strictSSL = obj[key] }, }), tag: new Definition('tag', { default: 'latest', type: String, description: ` If you ask npm to install a package and don't tell it a specific version, then it will install the specified tag. It is the tag added to the package@version specified in the \`npm dist-tag add\` command, if no explicit tag is given. When used by the \`npm diff\` command, this is the tag used to fetch the tarball that will be compared with the local files by default. If used in the \`npm publish\` command, this is the tag that will be added to the package submitted to the registry. `, flatten (key, obj, flatOptions) { flatOptions.defaultTag = obj[key] }, }), 'tag-version-prefix': new Definition('tag-version-prefix', { default: 'v', type: String, description: ` If set, alters the prefix used when tagging a new version when performing a version increment using \`npm version\`. To remove the prefix altogether, set it to the empty string: \`""\`. Because other tools may rely on the convention that npm version tags look like \`v1.0.0\`, _only use this property if it is absolutely necessary_. In particular, use care when overriding this setting for public packages. `, flatten, }), timing: new Definition('timing', { default: false, type: Boolean, description: ` If true, writes timing information to a process specific json file in the cache or \`logs-dir\`. The file name ends with \`-timing.json\`. 
You can quickly view it with this [json](https://npm.im/json) command line: \`cat ~/.npm/_logs/*-timing.json | npm exec -- json -g\`. Timing information will also be reported in the terminal. To suppress this while still writing the timing file, use \`--silent\`. `, }), umask: new Definition('umask', { default: 0, type: Umask, description: ` The "umask" value to use when setting the file creation mode on files and folders. Folders and executables are given a mode which is \`0o777\` masked against this value. Other files are given a mode which is \`0o666\` masked against this value. Note that the underlying system will _also_ apply its own umask value to files and folders that are created, and npm does not circumvent this, but rather adds the \`--umask\` config to it. Thus, the effective default umask value on most POSIX systems is 0o22, meaning that folders and executables are created with a mode of 0o755 and other files are created with a mode of 0o644. `, flatten, }), unicode: new Definition('unicode', { default: unicode, defaultDescription: ` false on windows, true on mac/unix systems with a unicode locale, as defined by the \`LC_ALL\`, \`LC_CTYPE\`, or \`LANG\` environment variables. `, type: Boolean, description: ` When set to true, npm uses unicode characters in the tree output. When false, it uses ascii characters instead of unicode glyphs. `, flatten, }), 'update-notifier': new Definition('update-notifier', { default: true, type: Boolean, description: ` Set to false to suppress the update notification when using an older version of npm than the latest. `, }), usage: new Definition('usage', { default: false, type: Boolean, short: ['?', 'H', 'h'], description: ` Show short usage output about the command specified. `, }), 'user-agent': new Definition('user-agent', { default: 'npm/{npm-version} ' + 'node/{node-version} ' + '{platform} ' + '{arch} ' + 'workspaces/{workspaces} ' + '{ci}', type: String, description: ` Sets the User-Agent request header. 
The following fields are replaced with their actual counterparts: * \`{npm-version}\` - The npm version in use * \`{node-version}\` - The Node.js version in use * \`{platform}\` - The value of \`process.platform\` * \`{arch}\` - The value of \`process.arch\` * \`{workspaces}\` - Set to \`true\` if the \`workspaces\` or \`workspace\` options are set. * \`{ci}\` - The value of the \`ci-name\` config, if set, prefixed with \`ci/\`, or an empty string if \`ci-name\` is empty. `, flatten (key, obj, flatOptions) { const value = obj[key] const ciName = ciInfo.name?.toLowerCase().split(' ').join('-') || null let inWorkspaces = false if (obj.workspaces || obj.workspace && obj.workspace.length) { inWorkspaces = true } flatOptions.userAgent = value.replace(/\{node-version\}/gi, process.version) .replace(/\{npm-version\}/gi, obj['npm-version']) .replace(/\{platform\}/gi, process.platform) .replace(/\{arch\}/gi, process.arch) .replace(/\{workspaces\}/gi, inWorkspaces) .replace(/\{ci\}/gi, ciName ? `ci/${ciName}` : '') .trim() // We can't clobber the original or else subsequent flattening will fail // (i.e. when we change the underlying config values) // obj[key] = flatOptions.userAgent // user-agent is a unique kind of config item that gets set from a template // and ends up translated. Because of this, the normal "should we set this // to process.env also doesn't work process.env.npm_config_user_agent = flatOptions.userAgent }, }), userconfig: new Definition('userconfig', { default: '~/.npmrc', type: path, description: ` The location of user-level configuration settings. This may be overridden by the \`npm_config_userconfig\` environment variable or the \`--userconfig\` command line option, but may _not_ be overridden by settings in the \`globalconfig\` file. `, }), version: new Definition('version', { default: false, type: Boolean, short: 'v', description: ` If true, output the npm version and exit successfully. Only relevant when specified explicitly on the command line. 
`, }), versions: new Definition('versions', { default: false, type: Boolean, description: ` If true, output the npm version as well as node's \`process.versions\` map and the version in the current working directory's \`package.json\` file if one exists, and exit successfully. Only relevant when specified explicitly on the command line. `, }), viewer: new Definition('viewer', { default: isWindows ? 'browser' : 'man', defaultDescription: ` "man" on Posix, "browser" on Windows `, type: String, description: ` The program to use to view help content. Set to \`"browser"\` to view html help content in the default web browser. `, }), which: new Definition('which', { default: null, hint: '<fundingSourceNumber>', type: [null, Number], description: ` If there are multiple funding sources, which 1-indexed source URL to open. `, }), workspace: new Definition('workspace', { default: [], type: [String, Array], hint: '<workspace-name>', short: 'w', envExport: false, description: ` Enable running a command in the context of the configured workspaces of the current project while filtering by running only the workspaces defined by this configuration option. Valid values for the \`workspace\` config are either: * Workspace names * Path to a workspace directory * Path to a parent workspace directory (will result in selecting all workspaces within that folder) When set for the \`npm init\` command, this may be set to the folder of a workspace which does not yet exist, to create the folder and set it up as a brand new workspace within the project. `, flatten: (key, obj, flatOptions) => { definitions['user-agent'].flatten('user-agent', obj, flatOptions) }, }), workspaces: new Definition('workspaces', { default: null, type: [null, Boolean], short: 'ws', envExport: false, description: ` Set to true to run the command in the context of **all** configured workspaces. Explicitly setting this to false will cause commands like \`install\` to ignore workspaces altogether. 
When not set explicitly: - Commands that operate on the \`node_modules\` tree (install, update, etc.) will link workspaces into the \`node_modules\` folder. - Commands that do other things (test, exec, publish, etc.) will operate on the root project, _unless_ one or more workspaces are specified in the \`workspace\` config. `, flatten: (key, obj, flatOptions) => { definitions['user-agent'].flatten('user-agent', obj, flatOptions) // TODO: this is a derived value, and should be reworked when we have a // pattern for derived value // workspacesEnabled is true whether workspaces is null or true // commands contextually work with workspaces or not regardless of // configuration, so we need an option specifically to disable workspaces flatOptions.workspacesEnabled = obj[key] !== false }, }), 'workspaces-update': new Definition('workspaces-update', { default: true, type: Boolean, description: ` If set to true, the npm cli will run an update after operations that may possibly change the workspaces installed to the \`node_modules\` folder. `, flatten, }), yes: new Definition('yes', { default: null, type: [null, Boolean], short: 'y', description: ` Automatically answer "yes" to any prompts that npm might print on the command line. `, }), } module.exports = definitions PK]�\���""config/lib/definitions/index.jsnu�[���const definitions = require('./definitions.js') // use the defined flattening function, and copy over any scoped // registries and registry-specific "nerfdart" configs verbatim // // TODO: make these getters so that we only have to make dirty // the thing that changed, and then flatten the fields that // could have changed when a config.set is called. // // TODO: move nerfdart auth stuff into a nested object that // is only passed along to paths that end up calling npm-registry-fetch. 
const flatten = (obj, flat = {}) => { for (const [key, val] of Object.entries(obj)) { const def = definitions[key] if (def && def.flatten) { def.flatten(key, obj, flat) } else if (/@.*:registry$/i.test(key) || /^\/\//.test(key)) { flat[key] = val } } return flat } const definitionProps = Object.entries(definitions) .reduce((acc, [key, { short = [], default: d }]) => { // can be either an array or string for (const s of [].concat(short)) { acc.shorthands[s] = [`--${key}`] } acc.defaults[key] = d return acc }, { shorthands: {}, defaults: {} }) // aliases where they get expanded into a completely different thing // these are NOT supported in the environment or npmrc files, only // expanded on the CLI. // TODO: when we switch off of nopt, use an arg parser that supports // more reasonable aliasing and short opts right in the definitions set. const shorthands = { 'enjoy-by': ['--before'], d: ['--loglevel', 'info'], dd: ['--loglevel', 'verbose'], ddd: ['--loglevel', 'silly'], quiet: ['--loglevel', 'warn'], q: ['--loglevel', 'warn'], s: ['--loglevel', 'silent'], silent: ['--loglevel', 'silent'], verbose: ['--loglevel', 'verbose'], desc: ['--description'], help: ['--usage'], local: ['--no-global'], n: ['--no-yes'], no: ['--no-yes'], porcelain: ['--parseable'], readonly: ['--read-only'], reg: ['--registry'], iwr: ['--include-workspace-root'], ...definitionProps.shorthands, } module.exports = { defaults: definitionProps.defaults, definitions, flatten, shorthands, } PK]�\+Jkk$config/lib/definitions/definition.jsnu�[���// class that describes a config key we know about // this keeps us from defining a config key and not // providing a default, description, etc. // // TODO: some kind of categorization system, so we can // say "these are for registry access", "these are for // version resolution" etc. 
const required = ['type', 'description', 'default', 'key'] const allowed = [ 'default', 'defaultDescription', 'deprecated', 'description', 'exclusive', 'flatten', 'hint', 'key', 'short', 'type', 'typeDescription', 'usage', 'envExport', ] const { semver: { type: semver }, Umask: { type: Umask }, url: { type: url }, path: { type: path }, } = require('../type-defs.js') class Definition { constructor (key, def) { this.key = key // if it's set falsey, don't export it, otherwise we do by default this.envExport = true Object.assign(this, def) this.validate() if (!this.defaultDescription) { this.defaultDescription = describeValue(this.default) } if (!this.typeDescription) { this.typeDescription = describeType(this.type) } // hint is only used for non-boolean values if (!this.hint) { if (this.type === Number) { this.hint = '<number>' } else { this.hint = `<${this.key}>` } } if (!this.usage) { this.usage = describeUsage(this) } } validate () { for (const req of required) { if (!Object.prototype.hasOwnProperty.call(this, req)) { throw new Error(`config lacks ${req}: ${this.key}`) } } if (!this.key) { throw new Error(`config lacks key: ${this.key}`) } for (const field of Object.keys(this)) { if (!allowed.includes(field)) { throw new Error(`config defines unknown field ${field}: ${this.key}`) } } } // a textual description of this config, suitable for help output describe () { const description = unindent(this.description) const noEnvExport = this.envExport ? '' : ` This value is not exported to the environment for child processes. ` const deprecated = !this.deprecated ? '' : `* DEPRECATED: ${unindent(this.deprecated)}\n` /* eslint-disable-next-line max-len */ const exclusive = !this.exclusive ? 
'' : `\nThis config can not be used with: \`${this.exclusive.join('`, `')}\`` return wrapAll(`#### \`${this.key}\` * Default: ${unindent(this.defaultDescription)} * Type: ${unindent(this.typeDescription)} ${deprecated} ${description} ${exclusive} ${noEnvExport}`) } } const describeUsage = def => { let key = '' // Single type if (!Array.isArray(def.type)) { if (def.short) { key = `-${def.short}|` } if (def.type === Boolean && def.default !== false) { key = `${key}--no-${def.key}` } else { key = `${key}--${def.key}` } if (def.type !== Boolean) { key = `${key} ${def.hint}` } return key } key = `--${def.key}` if (def.short) { key = `-${def.short}|--${def.key}` } // Multiple types let types = def.type const multiple = types.includes(Array) const bool = types.includes(Boolean) // null type means optional and doesn't currently affect usage output since // all non-optional params have defaults so we render everything as optional types = types.filter(t => t !== null && t !== Array && t !== Boolean) if (!types.length) { return key } let description if (!types.some(t => typeof t !== 'string')) { // Specific values, use specifics given description = `<${types.filter(d => d).join('|')}>` } else { // Generic values, use hint description = def.hint } if (bool) { // Currently none of our multi-type configs with boolean values default to // false so all their hints should show `--no-`, if we ever add ones that // default to false we can branch the logic here key = `--no-${def.key}|${key}` } const usage = `${key} ${description}` if (multiple) { return `${usage} [${usage} ...]` } else { return usage } } const describeType = type => { if (Array.isArray(type)) { const descriptions = type.filter(t => t !== Array).map(t => describeType(t)) // [a] => "a" // [a, b] => "a or b" // [a, b, c] => "a, b, or c" // [a, Array] => "a (can be set multiple times)" // [a, Array, b] => "a or b (can be set multiple times)" const last = descriptions.length > 1 ? 
[descriptions.pop()] : [] const oxford = descriptions.length > 1 ? ', or ' : ' or ' const words = [descriptions.join(', ')].concat(last).join(oxford) const multiple = type.includes(Array) ? ' (can be set multiple times)' : '' return `${words}${multiple}` } // Note: these are not quite the same as the description printed // when validation fails. In that case, we want to give the user // a bit more information to help them figure out what's wrong. switch (type) { case String: return 'String' case Number: return 'Number' case Umask: return 'Octal numeric string in range 0000..0777 (0..511)' case Boolean: return 'Boolean' case Date: return 'Date' case path: return 'Path' case semver: return 'SemVer string' case url: return 'URL' default: return describeValue(type) } } // if it's a string, quote it. otherwise, just cast to string. const describeValue = val => (typeof val === 'string' ? JSON.stringify(val) : String(val)) const unindent = s => { // get the first \n followed by a bunch of spaces, and pluck off // that many spaces from the start of every line. const match = s.match(/\n +/) return !match ? s.trim() : s.split(match[0]).join('\n').trim() } const wrap = s => { const cols = Math.min(Math.max(20, process.stdout.columns) || 80, 80) - 5 return unindent(s) .split(/[ \n]+/) .reduce((left, right) => { const last = left.split('\n').pop() const join = last.length && last.length + right.length > cols ? 
'\n' : ' ' return left + join + right }) } const wrapAll = s => { let inCodeBlock = false return s .split('\n\n') .map(block => { if (inCodeBlock || block.startsWith('```')) { inCodeBlock = !block.endsWith('```') return block } if (block.charAt(0) === '*') { return ( '* ' + block .slice(1) .trim() .split('\n* ') .map(li => { return wrap(li).replace(/\n/g, '\n ') }) .join('\n* ') ) } else { return wrap(block) } }) .join('\n\n') } module.exports = Definition PK]�\?0E!llconfig/lib/type-defs.jsnu�[���const nopt = require('nopt') const { validate: validateUmask } = require('./umask.js') class Umask {} class Semver {} const semverValid = require('semver/functions/valid') const validateSemver = (data, k, val) => { const valid = semverValid(val) if (!valid) { return false } data[k] = valid } const noptValidatePath = nopt.typeDefs.path.validate const validatePath = (data, k, val) => { if (typeof val !== 'string') { return false } return noptValidatePath(data, k, val) } // add descriptions so we can validate more usefully module.exports = { ...nopt.typeDefs, semver: { type: Semver, validate: validateSemver, description: 'full valid SemVer string', }, Umask: { type: Umask, validate: validateUmask, description: 'octal number in range 0o000..0o777 (0..511)', }, url: { ...nopt.typeDefs.url, description: 'full url with "http://"', }, path: { ...nopt.typeDefs.path, validate: validatePath, description: 'valid filesystem path', }, Number: { ...nopt.typeDefs.Number, description: 'numeric value', }, Boolean: { ...nopt.typeDefs.Boolean, description: 'boolean value (true or false)', }, Date: { ...nopt.typeDefs.Date, description: 'valid Date string', }, } // TODO: make nopt less of a global beast so this kludge isn't necessary nopt.typeDefs = module.exports PK]�\���<>>config/lib/type-description.jsnu�[���// return the description of the valid values of a field // returns a string for one thing, or an array of descriptions const typeDefs = require('./type-defs.js') const typeDescription = 
t => { if (!t || typeof t !== 'function' && typeof t !== 'object') { return t } if (Array.isArray(t)) { return t.map(t => typeDescription(t)) } for (const { type, description } of Object.values(typeDefs)) { if (type === t) { return description || type } } return t } module.exports = t => [].concat(typeDescription(t)).filter(t => t !== undefined) PK]�\|�&-�p�pconfig/lib/index.jsnu�[���// TODO: set the scope config from package.json or explicit cli config const { walkUp } = require('walk-up-path') const ini = require('ini') const nopt = require('nopt') const { log, time } = require('proc-log') const { resolve, dirname, join } = require('node:path') const { homedir } = require('node:os') const { readFile, writeFile, chmod, unlink, stat, mkdir, } = require('node:fs/promises') const fileExists = (...p) => stat(resolve(...p)) .then((st) => st.isFile()) .catch(() => false) const dirExists = (...p) => stat(resolve(...p)) .then((st) => st.isDirectory()) .catch(() => false) const hasOwnProperty = (obj, key) => Object.prototype.hasOwnProperty.call(obj, key) const typeDefs = require('./type-defs.js') const nerfDart = require('./nerf-dart.js') const envReplace = require('./env-replace.js') const parseField = require('./parse-field.js') const setEnvs = require('./set-envs.js') // types that can be saved back to const confFileTypes = new Set([ 'global', 'user', 'project', ]) const confTypes = new Set([ 'default', 'builtin', ...confFileTypes, 'env', 'cli', ]) class Config { #loaded = false #flatten // populated the first time we flatten the object #flatOptions = null static get typeDefs () { return typeDefs } constructor ({ definitions, shorthands, flatten, npmPath, // options just to override in tests, mostly env = process.env, argv = process.argv, platform = process.platform, execPath = process.execPath, cwd = process.cwd(), excludeNpmCwd = false, }) { // turn the definitions into nopt's weirdo syntax this.definitions = definitions const types = {} const defaults = {} 
this.deprecated = {} for (const [key, def] of Object.entries(definitions)) { defaults[key] = def.default types[key] = def.type if (def.deprecated) { this.deprecated[key] = def.deprecated.trim().replace(/\n +/, '\n') } } this.#flatten = flatten this.types = types this.shorthands = shorthands this.defaults = defaults this.npmPath = npmPath this.npmBin = join(this.npmPath, 'bin/npm-cli.js') this.argv = argv this.env = env this.execPath = execPath this.platform = platform this.cwd = cwd this.excludeNpmCwd = excludeNpmCwd // set when we load configs this.globalPrefix = null this.localPrefix = null this.localPackage = null // defaults to env.HOME, but will always be *something* this.home = null // set up the prototype chain of config objects const wheres = [...confTypes] this.data = new Map() let parent = null for (const where of wheres) { this.data.set(where, parent = new ConfigData(parent)) } this.data.set = () => { throw new Error('cannot change internal config data structure') } this.data.delete = () => { throw new Error('cannot change internal config data structure') } this.sources = new Map([]) this.list = [] for (const { data } of this.data.values()) { this.list.unshift(data) } Object.freeze(this.list) this.#loaded = false } get loaded () { return this.#loaded } get prefix () { return this.#get('global') ? this.globalPrefix : this.localPrefix } // return the location where key is found. find (key) { if (!this.loaded) { throw new Error('call config.load() before reading values') } // have to look in reverse order const entries = [...this.data.entries()] for (let i = entries.length - 1; i > -1; i--) { const [where, { data }] = entries[i] if (hasOwnProperty(data, key)) { return where } } return null } get (key, where) { if (!this.loaded) { throw new Error('call config.load() before reading values') } return this.#get(key, where) } // we need to get values sometimes, so use this internal one to do so // while in the process of loading. 
#get (key, where = null) { if (where !== null && !confTypes.has(where)) { throw new Error('invalid config location param: ' + where) } const { data } = this.data.get(where || 'cli') return where === null || hasOwnProperty(data, key) ? data[key] : undefined } set (key, val, where = 'cli') { if (!this.loaded) { throw new Error('call config.load() before setting values') } if (!confTypes.has(where)) { throw new Error('invalid config location param: ' + where) } this.#checkDeprecated(key) const { data, raw } = this.data.get(where) data[key] = val if (['global', 'user', 'project'].includes(where)) { raw[key] = val } // this is now dirty, the next call to this.valid will have to check it this.data.get(where)[_valid] = null // the flat options are invalidated, regenerate next time they're needed this.#flatOptions = null } get flat () { if (this.#flatOptions) { return this.#flatOptions } // create the object for flat options passed to deps const timeEnd = time.start('config:load:flatten') this.#flatOptions = {} // walk from least priority to highest for (const { data } of this.data.values()) { this.#flatten(data, this.#flatOptions) } this.#flatOptions.nodeBin = this.execPath this.#flatOptions.npmBin = this.npmBin timeEnd() return this.#flatOptions } delete (key, where = 'cli') { if (!this.loaded) { throw new Error('call config.load() before deleting values') } if (!confTypes.has(where)) { throw new Error('invalid config location param: ' + where) } const { data, raw } = this.data.get(where) delete data[key] if (['global', 'user', 'project'].includes(where)) { delete raw[key] } } async load () { if (this.loaded) { throw new Error('attempting to load npm config multiple times') } // first load the defaults, which sets the global prefix this.loadDefaults() // next load the builtin config, as this sets new effective defaults await this.loadBuiltinConfig() // cli and env are not async, and can set the prefix, relevant to project this.loadCLI() this.loadEnv() // next project 
config, which can affect userconfig location await this.loadProjectConfig() // then user config, which can affect globalconfig location await this.loadUserConfig() // last but not least, global config file await this.loadGlobalConfig() // set this before calling setEnvs, so that we don't have to share // private attributes, as that module also does a bunch of get operations this.#loaded = true // set proper globalPrefix now that everything is loaded this.globalPrefix = this.get('prefix') this.setEnvs() } loadDefaults () { this.loadGlobalPrefix() this.loadHome() const defaultsObject = { ...this.defaults, prefix: this.globalPrefix, } try { defaultsObject['npm-version'] = require(join(this.npmPath, 'package.json')).version } catch { // in some weird state where the passed in npmPath does not have a package.json // this will never happen in npm, but is guarded here in case this is consumed // in other ways + tests } this.#loadObject(defaultsObject, 'default', 'default values') const { data } = this.data.get('default') // if the prefix is set on cli, env, or userconfig, then we need to // default the globalconfig file to that location, instead of the default // global prefix. It's weird that `npm get globalconfig --prefix=/foo` // returns `/foo/etc/npmrc`, but better to not change it at this point. // define a custom getter, but turn into a normal prop // if we set it. 
otherwise it can't be set on child objects Object.defineProperty(data, 'globalconfig', { get: () => resolve(this.#get('prefix'), 'etc/npmrc'), set (value) { Object.defineProperty(data, 'globalconfig', { value, configurable: true, writable: true, enumerable: true, }) }, configurable: true, enumerable: true, }) } loadHome () { this.home = this.env.HOME || homedir() } loadGlobalPrefix () { if (this.globalPrefix) { throw new Error('cannot load default global prefix more than once') } if (this.env.PREFIX) { this.globalPrefix = this.env.PREFIX } else if (this.platform === 'win32') { // c:\node\node.exe --> prefix=c:\node\ this.globalPrefix = dirname(this.execPath) } else { // /usr/local/bin/node --> prefix=/usr/local this.globalPrefix = dirname(dirname(this.execPath)) // destdir only is respected on Unix if (this.env.DESTDIR) { this.globalPrefix = join(this.env.DESTDIR, this.globalPrefix) } } } loadEnv () { const conf = Object.create(null) for (const [envKey, envVal] of Object.entries(this.env)) { if (!/^npm_config_/i.test(envKey) || envVal === '') { continue } let key = envKey.slice('npm_config_'.length) if (!key.startsWith('//')) { // don't normalize nerf-darted keys key = key.replace(/(?!^)_/g, '-') // don't replace _ at the start of the key .toLowerCase() } conf[key] = envVal } this.#loadObject(conf, 'env', 'environment') } loadCLI () { nopt.invalidHandler = (k, val, type) => this.invalidHandler(k, val, type, 'command line options', 'cli') const conf = nopt(this.types, this.shorthands, this.argv) nopt.invalidHandler = null this.parsedArgv = conf.argv delete conf.argv this.#loadObject(conf, 'cli', 'command line options') } get valid () { for (const [where, { valid }] of this.data.entries()) { if (valid === false || valid === null && !this.validate(where)) { return false } } return true } validate (where) { if (!where) { let valid = true const authProblems = [] for (const entryWhere of this.data.keys()) { // no need to validate our defaults, we know they're fine // cli 
was already validated when parsed the first time if (entryWhere === 'default' || entryWhere === 'builtin' || entryWhere === 'cli') { continue } const ret = this.validate(entryWhere) valid = valid && ret if (['global', 'user', 'project'].includes(entryWhere)) { // after validating everything else, we look for old auth configs we no longer support // if these keys are found, we build up a list of them and the appropriate action and // attach it as context on the thrown error // first, keys that should be removed for (const key of ['_authtoken', '-authtoken']) { if (this.get(key, entryWhere)) { authProblems.push({ action: 'delete', key, where: entryWhere }) } } // NOTE we pull registry without restricting to the current 'where' because we want to // suggest scoping things to the registry they would be applied to, which is the default // regardless of where it was defined const nerfedReg = nerfDart(this.get('registry')) // keys that should be nerfed but currently are not for (const key of ['_auth', '_authToken', 'username', '_password']) { if (this.get(key, entryWhere)) { // username and _password must both exist in the same file to be recognized correctly if (key === 'username' && !this.get('_password', entryWhere)) { authProblems.push({ action: 'delete', key, where: entryWhere }) } else if (key === '_password' && !this.get('username', entryWhere)) { authProblems.push({ action: 'delete', key, where: entryWhere }) } else { authProblems.push({ action: 'rename', from: key, to: `${nerfedReg}:${key}`, where: entryWhere, }) } } } } } if (authProblems.length) { const { ErrInvalidAuth } = require('./errors.js') throw new ErrInvalidAuth(authProblems) } return valid } else { const obj = this.data.get(where) obj[_valid] = true nopt.invalidHandler = (k, val, type) => this.invalidHandler(k, val, type, obj.source, where) nopt.clean(obj.data, this.types, typeDefs) nopt.invalidHandler = null return obj[_valid] } } // fixes problems identified by validate(), accepts the 'problems' 
property from a thrown // ErrInvalidAuth to avoid having to check everything again repair (problems) { if (!problems) { try { this.validate() } catch (err) { // coverage skipped here because we don't need to test re-throwing an error // istanbul ignore next if (err.code !== 'ERR_INVALID_AUTH') { throw err } problems = err.problems } finally { if (!problems) { problems = [] } } } for (const problem of problems) { // coverage disabled for else branch because it doesn't do anything and shouldn't // istanbul ignore else if (problem.action === 'delete') { this.delete(problem.key, problem.where) } else if (problem.action === 'rename') { const raw = this.data.get(problem.where).raw?.[problem.from] const calculated = this.get(problem.from, problem.where) this.set(problem.to, raw || calculated, problem.where) this.delete(problem.from, problem.where) } } } // Returns true if the value is coming directly from the source defined // in default definitions, if the current value for the key config is // coming from any other different source, returns false isDefault (key) { const [defaultType, ...types] = [...confTypes] const defaultData = this.data.get(defaultType).data return hasOwnProperty(defaultData, key) && types.every(type => { const typeData = this.data.get(type).data return !hasOwnProperty(typeData, key) }) } invalidHandler (k, val, type, source, where) { const typeDescription = require('./type-description.js') log.warn( 'invalid config', k + '=' + JSON.stringify(val), `set in ${source}` ) this.data.get(where)[_valid] = false if (Array.isArray(type)) { if (type.includes(typeDefs.url.type)) { type = typeDefs.url.type } else { /* istanbul ignore if - no actual configs matching this, but * path types SHOULD be handled this way, like URLs, for the * same reason */ if (type.includes(typeDefs.path.type)) { type = typeDefs.path.type } } } const typeDesc = typeDescription(type) const mustBe = typeDesc .filter(m => m !== undefined && m !== Array) const msg = 'Must be' + 
this.#getOneOfKeywords(mustBe, typeDesc) const desc = mustBe.length === 1 ? mustBe[0] : [...new Set(mustBe.map(n => typeof n === 'string' ? n : JSON.stringify(n)))].join(', ') log.warn('invalid config', msg, desc) } #getOneOfKeywords (mustBe, typeDesc) { let keyword if (mustBe.length === 1 && typeDesc.includes(Array)) { keyword = ' one or more' } else if (mustBe.length > 1 && typeDesc.includes(Array)) { keyword = ' one or more of:' } else if (mustBe.length > 1) { keyword = ' one of:' } else { keyword = '' } return keyword } #loadObject (obj, where, source, er = null) { // obj is the raw data read from the file const conf = this.data.get(where) if (conf.source) { const m = `double-loading "${where}" configs from ${source}, ` + `previously loaded from ${conf.source}` throw new Error(m) } if (this.sources.has(source)) { const m = `double-loading config "${source}" as "${where}", ` + `previously loaded as "${this.sources.get(source)}"` throw new Error(m) } conf.source = source this.sources.set(source, where) if (er) { conf.loadError = er if (er.code !== 'ENOENT') { log.verbose('config', `error loading ${where} config`, er) } } else { conf.raw = obj for (const [key, value] of Object.entries(obj)) { const k = envReplace(key, this.env) const v = this.parseField(value, k) if (where !== 'default') { this.#checkDeprecated(k) if (this.definitions[key]?.exclusive) { for (const exclusive of this.definitions[key].exclusive) { if (!this.isDefault(exclusive)) { throw new TypeError(`--${key} can not be provided when using --${exclusive}`) } } } } conf.data[k] = v } } } #checkDeprecated (key) { // XXX(npm9+) make this throw an error if (this.deprecated[key]) { log.warn('config', key, this.deprecated[key]) } } // Parse a field, coercing it to the best type available. 
parseField (f, key, listElement = false) { return parseField(f, key, this, listElement) } async #loadFile (file, type) { // only catch the error from readFile, not from the loadObject call log.silly('config', `load:file:${file}`) await readFile(file, 'utf8').then( data => { const parsedConfig = ini.parse(data) if (type === 'project' && parsedConfig.prefix) { // Log error if prefix is mentioned in project .npmrc /* eslint-disable-next-line max-len */ log.error('config', `prefix cannot be changed from project config: ${file}.`) } return this.#loadObject(parsedConfig, type, file) }, er => this.#loadObject(null, type, file, er) ) } loadBuiltinConfig () { return this.#loadFile(resolve(this.npmPath, 'npmrc'), 'builtin') } async loadProjectConfig () { // the localPrefix can be set by the CLI config, but otherwise is // found by walking up the folder tree. either way, we load it before // we return to make sure localPrefix is set await this.loadLocalPrefix() // if we have not detected a local package json yet, try now that we // have a local prefix if (this.localPackage == null) { this.localPackage = await fileExists(this.localPrefix, 'package.json') } if (this.#get('global') === true || this.#get('location') === 'global') { this.data.get('project').source = '(global mode enabled, ignored)' this.sources.set(this.data.get('project').source, 'project') return } const projectFile = resolve(this.localPrefix, '.npmrc') // if we're in the ~ directory, and there happens to be a node_modules // folder (which is not TOO uncommon, it turns out), then we can end // up loading the "project" config where the "userconfig" will be, // which causes some calamaties. So, we only load project config if // it doesn't match what the userconfig will be. 
if (projectFile !== this.#get('userconfig')) { return this.#loadFile(projectFile, 'project') } else { this.data.get('project').source = '(same as "user" config, ignored)' this.sources.set(this.data.get('project').source, 'project') } } async loadLocalPrefix () { const cliPrefix = this.#get('prefix', 'cli') if (cliPrefix) { this.localPrefix = cliPrefix return } const cliWorkspaces = this.#get('workspaces', 'cli') const isGlobal = this.#get('global') || this.#get('location') === 'global' for (const p of walkUp(this.cwd)) { // HACK: this is an option set in tests to stop the local prefix from being set // on tests that are created inside the npm repo if (this.excludeNpmCwd && p === this.npmPath) { break } const hasPackageJson = await fileExists(p, 'package.json') if (!this.localPrefix && (hasPackageJson || await dirExists(p, 'node_modules'))) { this.localPrefix = p this.localPackage = hasPackageJson // if workspaces are disabled, or we're in global mode, return now if (cliWorkspaces === false || isGlobal) { return } // otherwise, continue the loop continue } if (this.localPrefix && hasPackageJson) { const rpj = require('read-package-json-fast') // if we already set localPrefix but this dir has a package.json // then we need to see if `p` is a workspace root by reading its package.json // however, if reading it fails then we should just move on const pkg = await rpj(resolve(p, 'package.json')).catch(() => false) if (!pkg) { continue } const mapWorkspaces = require('@npmcli/map-workspaces') const workspaces = await mapWorkspaces({ cwd: p, pkg }) for (const w of workspaces.values()) { if (w === this.localPrefix) { // see if there's a .npmrc file in the workspace, if so log a warning if (await fileExists(this.localPrefix, '.npmrc')) { log.warn('config', `ignoring workspace config at ${this.localPrefix}/.npmrc`) } // set the workspace in the default layer, which allows it to be overridden easily const { data } = this.data.get('default') data.workspace = [this.localPrefix] 
this.localPrefix = p this.localPackage = hasPackageJson log.info('config', `found workspace root at ${this.localPrefix}`) // we found a root, so we return now return } } } } if (!this.localPrefix) { this.localPrefix = this.cwd } } loadUserConfig () { return this.#loadFile(this.#get('userconfig'), 'user') } loadGlobalConfig () { return this.#loadFile(this.#get('globalconfig'), 'global') } async save (where) { if (!this.loaded) { throw new Error('call config.load() before saving') } if (!confFileTypes.has(where)) { throw new Error('invalid config location param: ' + where) } const conf = this.data.get(where) conf[_loadError] = null if (where === 'user') { // if email is nerfed, then we want to de-nerf it const nerfed = nerfDart(this.get('registry')) const email = this.get(`${nerfed}:email`, 'user') if (email) { this.delete(`${nerfed}:email`, 'user') this.set('email', email, 'user') } } // We need the actual raw data before we called parseField so that we are // saving the same content back to the file const iniData = ini.stringify(conf.raw).trim() + '\n' if (!iniData.trim()) { // ignore the unlink error (eg, if file doesn't exist) await unlink(conf.source).catch(() => {}) return } const dir = dirname(conf.source) await mkdir(dir, { recursive: true }) await writeFile(conf.source, iniData, 'utf8') const mode = where === 'user' ? 
0o600 : 0o666 await chmod(conf.source, mode) } clearCredentialsByURI (uri, level = 'user') { const nerfed = nerfDart(uri) const def = nerfDart(this.get('registry')) if (def === nerfed) { this.delete(`-authtoken`, level) this.delete(`_authToken`, level) this.delete(`_authtoken`, level) this.delete(`_auth`, level) this.delete(`_password`, level) this.delete(`username`, level) // de-nerf email if it's nerfed to the default registry const email = this.get(`${nerfed}:email`, level) if (email) { this.set('email', email, level) } } this.delete(`${nerfed}:_authToken`, level) this.delete(`${nerfed}:_auth`, level) this.delete(`${nerfed}:_password`, level) this.delete(`${nerfed}:username`, level) this.delete(`${nerfed}:email`, level) this.delete(`${nerfed}:certfile`, level) this.delete(`${nerfed}:keyfile`, level) } setCredentialsByURI (uri, { token, username, password, certfile, keyfile }) { const nerfed = nerfDart(uri) // field that hasn't been used as documented for a LONG time, // and as of npm 7.10.0, isn't used at all. We just always // send auth if we have it, only to the URIs under the nerf dart. this.delete(`${nerfed}:always-auth`, 'user') this.delete(`${nerfed}:email`, 'user') if (certfile && keyfile) { this.set(`${nerfed}:certfile`, certfile, 'user') this.set(`${nerfed}:keyfile`, keyfile, 'user') // cert/key may be used in conjunction with other credentials, thus no `else` } if (token) { this.set(`${nerfed}:_authToken`, token, 'user') this.delete(`${nerfed}:_password`, 'user') this.delete(`${nerfed}:username`, 'user') } else if (username || password) { if (!username) { throw new Error('must include username') } if (!password) { throw new Error('must include password') } this.delete(`${nerfed}:_authToken`, 'user') this.set(`${nerfed}:username`, username, 'user') // note: not encrypted, no idea why we bothered to do this, but oh well // protects against shoulder-hacks if password is memorable, I guess? 
const encoded = Buffer.from(password, 'utf8').toString('base64') this.set(`${nerfed}:_password`, encoded, 'user') } else if (!certfile || !keyfile) { throw new Error('No credentials to set.') } } // this has to be a bit more complicated to support legacy data of all forms getCredentialsByURI (uri) { const nerfed = nerfDart(uri) const def = nerfDart(this.get('registry')) const creds = {} // email is handled differently, it used to always be nerfed and now it never should be // if it's set nerfed to the default registry, then we copy it to the unnerfed key // TODO: evaluate removing 'email' from the credentials object returned here const email = this.get(`${nerfed}:email`) || this.get('email') if (email) { if (nerfed === def) { this.set('email', email, 'user') } creds.email = email } const certfileReg = this.get(`${nerfed}:certfile`) const keyfileReg = this.get(`${nerfed}:keyfile`) if (certfileReg && keyfileReg) { creds.certfile = certfileReg creds.keyfile = keyfileReg // cert/key may be used in conjunction with other credentials, thus no `return` } const tokenReg = this.get(`${nerfed}:_authToken`) if (tokenReg) { creds.token = tokenReg return creds } const userReg = this.get(`${nerfed}:username`) const passReg = this.get(`${nerfed}:_password`) if (userReg && passReg) { creds.username = userReg creds.password = Buffer.from(passReg, 'base64').toString('utf8') const auth = `${creds.username}:${creds.password}` creds.auth = Buffer.from(auth, 'utf8').toString('base64') return creds } const authReg = this.get(`${nerfed}:_auth`) if (authReg) { const authDecode = Buffer.from(authReg, 'base64').toString('utf8') const authSplit = authDecode.split(':') creds.username = authSplit.shift() creds.password = authSplit.join(':') creds.auth = authReg return creds } // at this point, nothing else is usable so just return what we do have return creds } // set up the environment object we have with npm_config_* environs // for all configs that are different from their default values, 
and // set EDITOR and HOME. setEnvs () { setEnvs(this) } } const _loadError = Symbol('loadError') const _valid = Symbol('valid') class ConfigData { #data #source = null #raw = null constructor (parent) { this.#data = Object.create(parent && parent.data) this.#raw = {} this[_valid] = true } get data () { return this.#data } get valid () { return this[_valid] } set source (s) { if (this.#source) { throw new Error('cannot set ConfigData source more than once') } this.#source = s } get source () { return this.#source } set loadError (e) { if (this[_loadError] || (Object.keys(this.#raw).length)) { throw new Error('cannot set ConfigData loadError after load') } this[_loadError] = e } get loadError () { return this[_loadError] } set raw (r) { if (Object.keys(this.#raw).length || this[_loadError]) { throw new Error('cannot set ConfigData raw after load') } this.#raw = r } get raw () { return this.#raw } } module.exports = Config PK]�\�*J��config/lib/umask.jsnu�[���const parse = val => { // this is run via nopt and parse field where everything is // converted to a string first, ignoring coverage for now // instead of figuring out what is happening under the hood in nopt // istanbul ignore else if (typeof val === 'string') { if (/^0o?[0-7]+$/.test(val)) { return parseInt(val.replace(/^0o?/, ''), 8) } else if (/^[1-9][0-9]*$/.test(val)) { return parseInt(val, 10) } else { throw new Error(`invalid umask value: ${val}`) } } else { if (typeof val !== 'number') { throw new Error(`invalid umask value: ${val}`) } val = Math.floor(val) if (val < 0 || val > 511) { throw new Error(`invalid umask value: ${val}`) } return val } } const validate = (data, k, val) => { try { data[k] = parse(val) return true } catch (er) { return false } } module.exports = { parse, validate } PK]�\Q_z���config/lib/env-replace.jsnu�[���// replace any ${ENV} values with the appropriate environ. 
const envExpr = /(?<!\\)(\\*)\$\{([^${}]+)\}/g module.exports = (f, env) => f.replace(envExpr, (orig, esc, name) => { const val = env[name] !== undefined ? env[name] : `$\{${name}}` // consume the escape chars that are relevant. if (esc.length % 2) { return orig.slice((esc.length + 1) / 2) } return (esc.slice(esc.length / 2)) + val }) PK]�\pm����config/lib/errors.jsnu�[���'use strict' class ErrInvalidAuth extends Error { constructor (problems) { let message = 'Invalid auth configuration found: ' message += problems.map((problem) => { // istanbul ignore else if (problem.action === 'delete') { return `\`${problem.key}\` is not allowed in ${problem.where} config` } else if (problem.action === 'rename') { return `\`${problem.from}\` must be renamed to \`${problem.to}\` in ${problem.where} config` } }).join(', ') message += '\nPlease run `npm config fix` to repair your configuration.`' super(message) this.code = 'ERR_INVALID_AUTH' this.problems = problems } } module.exports = { ErrInvalidAuth, } PK]�\[��� � config/lib/set-envs.jsnu�[���// Set environment variables for any non-default configs, // so that they're already there when we run lifecycle scripts. // // See https://github.com/npm/rfcs/pull/90 // Return the env key if this is a thing that belongs in the env. // Ie, if the key isn't a @scope, //nerf.dart, or _private, // and the value is a string or array. Otherwise return false. const envKey = (key, val) => { return !/^[/@_]/.test(key) && (typeof envVal(val) === 'string') && `npm_config_${key.replace(/-/g, '_').toLowerCase()}` } const envVal = val => Array.isArray(val) ? val.map(v => envVal(v)).join('\n\n') : val === null || val === undefined || val === false ? '' : typeof val === 'object' ? null : String(val) const sameConfigValue = (def, val) => !Array.isArray(val) || !Array.isArray(def) ? 
def === val : sameArrayValue(def, val) const sameArrayValue = (def, val) => { if (def.length !== val.length) { return false } for (let i = 0; i < def.length; i++) { /* istanbul ignore next - there are no array configs where the default * is not an empty array, so this loop is a no-op, but it's the correct * thing to do if we ever DO add a config like that. */ if (def[i] !== val[i]) { return false } } return true } const setEnv = (env, rawKey, rawVal) => { const val = envVal(rawVal) const key = envKey(rawKey, val) if (key && val !== null) { env[key] = val } } const setEnvs = (config) => { // This ensures that all npm config values that are not the defaults are // shared appropriately with child processes, without false positives. const { env, defaults, definitions, list: [cliConf, envConf], } = config env.INIT_CWD = process.cwd() // if the key is deprecated, skip it always. // if the key is the default value, // if the environ is NOT the default value, // set the environ // else skip it, it's fine // if the key is NOT the default value, // if the env is setting it, then leave it (already set) // otherwise, set the env const cliSet = new Set(Object.keys(cliConf)) const envSet = new Set(Object.keys(envConf)) for (const key in cliConf) { const { deprecated, envExport = true } = definitions[key] || {} if (deprecated || envExport === false) { continue } if (sameConfigValue(defaults[key], cliConf[key])) { // config is the default, if the env thought different, then we // have to set it BACK to the default in the environment. if (!sameConfigValue(envConf[key], cliConf[key])) { setEnv(env, key, cliConf[key]) } } else { // config is not the default. 
if the env wasn't the one to set // it that way, then we have to put it in the env if (!(envSet.has(key) && !cliSet.has(key))) { setEnv(env, key, cliConf[key]) } } } // also set some other common nice envs that we want to rely on env.HOME = config.home env.npm_config_global_prefix = config.globalPrefix env.npm_config_local_prefix = config.localPrefix if (cliConf.editor) { env.EDITOR = cliConf.editor } // note: this doesn't afect the *current* node process, of course, since // it's already started, but it does affect the options passed to scripts. if (cliConf['node-options']) { env.NODE_OPTIONS = cliConf['node-options'] } env.npm_execpath = config.npmBin env.NODE = env.npm_node_execpath = config.execPath } module.exports = setEnvs PK]�\,�z7��config/lib/parse-field.jsnu�[���// Parse a field, coercing it to the best type available. const typeDefs = require('./type-defs.js') const envReplace = require('./env-replace.js') const { resolve } = require('node:path') const { parse: umaskParse } = require('./umask.js') const parseField = (f, key, opts, listElement = false) => { if (typeof f !== 'string' && !Array.isArray(f)) { return f } const { platform, types, home, env } = opts // type can be array or a single thing. coerce to array. const typeList = new Set([].concat(types[key])) const isPath = typeList.has(typeDefs.path.type) const isBool = typeList.has(typeDefs.Boolean.type) const isString = isPath || typeList.has(typeDefs.String.type) const isUmask = typeList.has(typeDefs.Umask.type) const isNumber = typeList.has(typeDefs.Number.type) const isList = !listElement && typeList.has(Array) const isDate = typeList.has(typeDefs.Date.type) if (Array.isArray(f)) { return !isList ? f : f.map(field => parseField(field, key, opts, true)) } // now we know it's a string f = f.trim() // list types get put in the environment separated by double-\n // usually a single \n would suffice, but ca/cert configs can contain // line breaks and multiple entries. 
if (isList) { return parseField(f.split('\n\n'), key, opts) } // --foo is like --foo=true for boolean types if (isBool && !isString && f === '') { return true } // string types can be the string 'true', 'false', etc. // otherwise, parse these values out if (!isString && !isPath && !isNumber) { switch (f) { case 'true': return true case 'false': return false case 'null': return null case 'undefined': return undefined } } f = envReplace(f, env) if (isDate) { return new Date(f) } if (isPath) { const homePattern = platform === 'win32' ? /^~(\/|\\)/ : /^~\// if (homePattern.test(f) && home) { f = resolve(home, f.slice(2)) } else { f = resolve(f) } } if (isUmask) { try { return umaskParse(f) } catch (er) { // let it warn later when we validate return f } } if (isNumber && !isNaN(f)) { f = +f } return f } module.exports = parseField PK]�\.9����config/LICENSEnu�[���The ISC License Copyright (c) npm, Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. PK]�\eW�)9&9&config/README.mdnu�[���# `@npmcli/config` Configuration management for the npm cli. This module is the spiritual descendant of [`npmconf`](http://npm.im/npmconf), and the code that once lived in npm's `lib/config/` folder. 
It does the management of configuration files that npm uses, but importantly, does _not_ define all the configuration defaults or types, as those parts make more sense to live within the npm CLI itself. The only exceptions: - The `prefix` config value has some special semantics, setting the local prefix if specified on the CLI options and not in global mode, or the global prefix otherwise. - The `project` config file is loaded based on the local prefix (which can only be set by the CLI config options, and otherwise defaults to a walk up the folder tree to the first parent containing a `node_modules` folder, `package.json` file, or `package-lock.json` file.) - The `userconfig` value, as set by the environment and CLI (defaulting to `~/.npmrc`, is used to load user configs. - The `globalconfig` value, as set by the environment, CLI, and `userconfig` file (defaulting to `$PREFIX/etc/npmrc`) is used to load global configs. - A `builtin` config, read from a `npmrc` file in the root of the npm project itself, overrides all defaults. The resulting hierarchy of configs: - CLI switches. eg `--some-key=some-value` on the command line. These are parsed by [`nopt`](http://npm.im/nopt), which is not a great choice, but it's the one that npm has used forever, and changing it will be difficult. - Environment variables. eg `npm_config_some_key=some_value` in the environment. There is no way at this time to modify this prefix. - INI-formatted project configs. eg `some-key = some-value` in the `localPrefix` folder (ie, the `cwd`, or its nearest parent that contains either a `node_modules` folder or `package.json` file.) - INI-formatted userconfig file. eg `some-key = some-value` in `~/.npmrc`. The `userconfig` config value can be overridden by the `cli`, `env`, or `project` configs to change this value. - INI-formatted globalconfig file. 
eg `some-key = some-value` in the `globalPrefix` folder, which is inferred by looking at the location of the node executable, or the `prefix` setting in the `cli`, `env`, `project`, or `userconfig`. The `globalconfig` value at any of those levels can override this. - INI-formatted builtin config file. eg `some-key = some-value` in `/usr/local/lib/node_modules/npm/npmrc`. This is not configurable, and is determined by looking in the `npmPath` folder. - Default values (passed in by npm when it loads this module). ## USAGE ```js const Config = require('@npmcli/config') const { shorthands, definitions, flatten } = require('@npmcli/config/lib/definitions') const conf = new Config({ // path to the npm module being run npmPath: resolve(__dirname, '..'), definitions, shorthands, flatten, // optional, defaults to process.argv // argv: [] <- if you are using this package in your own cli // and dont want to have colliding argv argv: process.argv, // optional, defaults to process.env env: process.env, // optional, defaults to process.execPath execPath: process.execPath, // optional, defaults to process.platform platform: process.platform, // optional, defaults to process.cwd() cwd: process.cwd(), }) // emits log events on the process object // see `proc-log` for more info process.on('log', (level, ...args) => { console.log(level, ...args) }) // returns a promise that fails if config loading fails, and // resolves when the config object is ready for action conf.load().then(() => { conf.validate() console.log('loaded ok! some-key = ' + conf.get('some-key')) }).catch(er => { console.error('error loading configs!', er) }) ``` ## API The `Config` class is the sole export. ```js const Config = require('@npmcli/config') ``` ### static `Config.typeDefs` The type definitions passed to `nopt` for CLI option parsing and known configuration validation. ### constructor `new Config(options)` Options: - `types` Types of all known config values. 
Note that some are effectively given semantic value in the config loading process itself. - `shorthands` An object mapping a shorthand value to an array of CLI arguments that replace it. - `defaults` Default values for each of the known configuration keys. These should be defined for all configs given a type, and must be valid. - `npmPath` The path to the `npm` module, for loading the `builtin` config file. - `cwd` Optional, defaults to `process.cwd()`, used for inferring the `localPrefix` and loading the `project` config. - `platform` Optional, defaults to `process.platform`. Used when inferring the `globalPrefix` from the `execPath`, since this is done diferently on Windows. - `execPath` Optional, defaults to `process.execPath`. Used to infer the `globalPrefix`. - `env` Optional, defaults to `process.env`. Source of the environment variables for configuration. - `argv` Optional, defaults to `process.argv`. Source of the CLI options used for configuration. Returns a `config` object, which is not yet loaded. Fields: - `config.globalPrefix` The prefix for `global` operations. Set by the `prefix` config value, or defaults based on the location of the `execPath` option. - `config.localPrefix` The prefix for `local` operations. Set by the `prefix` config value on the CLI only, or defaults to either the `cwd` or its nearest ancestor containing a `node_modules` folder or `package.json` file. - `config.sources` A read-only `Map` of the file (or a comment, if no file found, or relevant) to the config level loaded from that source. - `config.data` A `Map` of config level to `ConfigData` objects. These objects should not be modified directly under any circumstances. - `source` The source where this data was loaded from. - `raw` The raw data used to generate this config data, as it was parsed initially from the environment, config file, or CLI options. - `data` The data object reflecting the inheritance of configs up to this point in the chain. 
- `loadError` Any errors encountered that prevented the loading of this config data. - `config.list` A list sorted in priority of all the config data objects in the prototype chain. `config.list[0]` is the `cli` level, `config.list[1]` is the `env` level, and so on. - `cwd` The `cwd` param - `env` The `env` param - `argv` The `argv` param - `execPath` The `execPath` param - `platform` The `platform` param - `defaults` The `defaults` param - `shorthands` The `shorthands` param - `types` The `types` param - `npmPath` The `npmPath` param - `globalPrefix` The effective `globalPrefix` - `localPrefix` The effective `localPrefix` - `prefix` If `config.get('global')` is true, then `globalPrefix`, otherwise `localPrefix` - `home` The user's home directory, found by looking at `env.HOME` or calling `os.homedir()`. - `loaded` A boolean indicating whether or not configs are loaded - `valid` A getter that returns `true` if all the config objects are valid. Any data objects that have been modified with `config.set(...)` will be re-evaluated when `config.valid` is read. ### `config.load()` Load configuration from the various sources of information. Returns a `Promise` that resolves when configuration is loaded, and fails if a fatal error is encountered. ### `config.find(key)` Find the effective place in the configuration levels a given key is set. Returns one of: `cli`, `env`, `project`, `user`, `global`, `builtin`, or `default`. Returns `null` if the key is not set. ### `config.get(key, where = 'cli')` Load the given key from the config stack. ### `config.set(key, value, where = 'cli')` Set the key to the specified value, at the specified level in the config stack. ### `config.delete(key, where = 'cli')` Delete the configuration key from the specified level in the config stack. ### `config.validate(where)` Verify that all known configuration options are set to valid values, and log a warning if they are invalid. 
Invalid auth options will cause this method to throw an error with a `code` property of `ERR_INVALID_AUTH`, and a `problems` property listing the specific concerns with the current configuration. If `where` is not set, then all config objects are validated. Returns `true` if all configs are valid. Note that it's usually enough (and more efficient) to just check `config.valid`, since each data object is marked for re-evaluation on every `config.set()` operation. ### `config.repair(problems)` Accept an optional array of problems (as thrown by `config.validate()`) and perform the necessary steps to resolve them. If no problems are provided, this method will call `config.validate()` internally to retrieve them. Note that you must `await config.save('user')` in order to persist the changes. ### `config.isDefault(key)` Returns `true` if the value is coming directly from the default definitions, if the current value for the key config is coming from any other source, returns `false`. This method can be used for avoiding or tweaking default values, e.g: > Given a global default definition of foo='foo' it's possible to read that > value such as: > > ```js > const save = config.get('foo') > ``` > > Now in a different place of your app it's possible to avoid using the `foo` > default value, by checking to see if the current config value is currently > one that was defined by the default definitions: > > ```js > const save = config.isDefault('foo') ? 'bar' : config.get('foo') > ``` ### `config.save(where)` Save the config file specified by the `where` param. Must be one of `project`, `user`, `global`, `builtin`. 
PK]�\h�T�^^package-json/package.jsonnu�[���PK]�\,"a̘�"�package-json/lib/update-scripts.jsnu�[���PK]�\�j!rr%� package-json/lib/update-workspaces.jsnu�[���PK]�\#Ͷ��'X package-json/lib/update-dependencies.jsnu�[���PK]�\V_0�package-json/lib/index.jsnu�[���PK]�\����M�M�.package-json/lib/normalize.jsnu�[���PK]�\)xU1���|package-json/LICENSEnu�[���PK]�\ewHH �metavuln-calculator/package.jsonnu�[���PK]�\��N����metavuln-calculator/lib/hash.jsnu�[���PK]�\�~4t4t4#��metavuln-calculator/lib/advisory.jsnu�[���PK]�\Kt�s�� K�metavuln-calculator/lib/index.jsnu�[���PK]�\��� '��metavuln-calculator/lib/get-dep-spec.jsnu�[���PK]�\.9������metavuln-calculator/LICENSEnu�[���PK]�\�2q�KK�arborist/package.jsonnu�[���PK]�\ .r�ff��arborist/lib/printable.jsnu�[���PK]�\�����!W�arborist/lib/override-resolves.jsnu�[���PK]�\>��}jj��arborist/lib/spec-from-lock.jsnu�[���PK]�\��A�arborist/lib/dep-valid.jsnu�[���PK]�\"�XOO�arborist/lib/place-dep.jsnu�[���PK]�\��N���[arborist/lib/tracker.jsnu�[���PK]�\ւy���garborist/lib/add-rm-pkg-deps.jsnu�[���PK]�\ź�%%"�{arborist/lib/consistent-resolve.jsnu�[���PK]�\�b�Q��&&�arborist/lib/deepest-nesting-target.jsnu�[���PK]�\:��\??/�arborist/lib/link.jsnu�[���PK]�\]}L_ _ ��arborist/lib/realpath.jsnu�[���PK]�\���2�/�/Y�arborist/lib/audit-report.jsnu�[���PK]�\��).:.:$m�arborist/lib/arborist/load-actual.jsnu�[���PK]�\=�?^<<)�arborist/lib/arborist/isolated-reifier.jsnu�[���PK]�\���[Z.Z. 
gBarborist/lib/arborist/rebuild.jsnu�[���PK]�\��X��"�"qarborist/lib/arborist/index.jsnu�[���PK]�\k�Ѽ~%~%%�arborist/lib/arborist/load-virtual.jsnu�[���PK]�\�?�������arborist/lib/arborist/reify.jsnu�[���PK]�\o������)�arborist/lib/arborist/build-ideal-tree.jsnu�[���PK]�\=����$$harborist/lib/case-insensitive-map.jsnu�[���PK]�\�w��IIOmarborist/lib/optional-set.jsnu�[���PK]�\��ee�rarborist/lib/signals.jsnu�[���PK]�\|�<���xarborist/lib/relpath.jsnu�[���PK]�\��h�=*=*_yarborist/lib/yarn-lock.jsnu�[���PK]�\�ٖ..�arborist/lib/tree-check.jsnu�[���PK]�\zK�}]�arborist/lib/index.jsnu�[���PK]�\W���FF��arborist/lib/vuln.jsnu�[���PK]�\A�A���=�arborist/lib/inventory.jsnu�[���PK]�\�vq����arborist/lib/retire-path.jsnu�[���PK]�\�g<����arborist/lib/edge.jsnu�[���PK]�\z����arborist/lib/calc-dep-flags.jsnu�[���PK]�\#������arborist/lib/shrinkwrap.jsnu�[���PK]�\�(�~~r�arborist/lib/reset-dep-flags.jsnu�[���PK]�\��5?�arborist/lib/debug.jsnu�[���PK]�\d�trvv��arborist/lib/override-set.jsnu�[���PK]�\��� ]s]s"H�arborist/lib/query-selector-all.jsnu�[���PK]�\l��h� � �arborist/lib/packument-cache.jsnu�[���PK]�\֛H H B*arborist/lib/peer-entry-sets.jsnu�[���PK]�\� \SGG�4arborist/lib/from-path.jsnu�[���PK]�\h't�� i9arborist/lib/version-from-tgz.jsnu�[���PK]�\E��#���?arborist/lib/node.jsnu�[���PK]�\|����arborist/lib/gather-dep-set.jsnu�[���PK]�\�}?�7�7�arborist/lib/can-place-dep.jsnu�[���PK]�\�S��K&K&)arborist/lib/diff.jsnu�[���PK]�\RF����Oarborist/lib/signal-handling.jsnu�[���PK]�\�~�ll�Xarborist/bin/virtual.jsnu�[���PK]�\�t���gZarborist/bin/lib/logging.jsnu�[���PK]�\{��ʝ�vcarborist/bin/lib/print-tree.jsnu�[���PK]�\=��adarborist/bin/lib/timers.jsnu�[���PK]�\ _���2harborist/bin/lib/options.jsnu�[���PK]�\|[�vvvtarborist/bin/funding.jsnu�[���PK]�\�no� 3yarborist/bin/index.jsnu�[���PK]�\�w'�ww��arborist/bin/audit.jsnu�[���PK]�\4N\N||F�arborist/bin/prune.jsnu�[���PK]�\n# 
�||�arborist/bin/reify.jsnu�[���PK]�\�р���ȕarborist/bin/license.jsnu�[���PK]�\3����ܛarborist/bin/shrinkwrap.jsnu�[���PK]�\�K�&&��arborist/bin/actual.jsnu�[���PK]�\`6fq��c�arborist/bin/ideal.jsnu�[���PK]�\��d7�E�E4�arborist/README.mdnu�[���PK]�\�r��+�arborist/LICENSE.mdnu�[���PK]�\Q~ﮝ���agent/package.jsonnu�[���PK]�\�c��k�agent/lib/proxy.jsnu�[���PK]�\0��**��agent/lib/agents.jsnu�[���PK]�\��X����agent/lib/dns.jsnu�[���PK]�\7�e���agent/lib/index.jsnu�[���PK]�\��!\rrBagent/lib/errors.jsnu�[���PK]�\�x�� � � agent/lib/options.jsnu�[���PK]�\&���@@�*promise-spawn/package.jsonnu�[���PK]�\�>�J1promise-spawn/lib/escape.jsnu�[���PK]�\A�*����7promise-spawn/lib/index.jsnu�[���PK]�\|�q����Mpromise-spawn/LICENSEnu�[���PK]�\�.� �Pmap-workspaces/package.jsonnu�[���PK]�\=�.���;Xmap-workspaces/lib/index.jsnu�[���PK]�\�r��:smap-workspaces/LICENSE.mdnu�[���PK]�\�� ���vname-from-folder/package.jsonnu�[���PK]�\^�(O��z|name-from-folder/lib/index.jsnu�[���PK]�\�����}name-from-folder/LICENSEnu�[���PK]�\��Mz��redact/package.jsonnu�[���PK]�\���2pp�redact/lib/utils.jsnu�[���PK]�\�7�EEҝredact/lib/index.jsnu�[���PK]�\��.��Z�redact/lib/server.jsnu�[���PK]�\F䖮 3�redact/lib/deep-map.jsnu�[���PK]�\����))��redact/lib/matchers.jsnu�[���PK]�\%��$$�redact/LICENSEnu�[���PK]�\xI���'T�installed-package-contents/package.jsonnu�[���PK]�\.� ss'z�installed-package-contents/lib/index.jsnu�[���PK]�\.9����"D�installed-package-contents/LICENSEnu�[���PK]�\?�UU'y�installed-package-contents/bin/index.jsnu�[���PK]�\���^^$%�installed-package-contents/README.mdnu�[���PK]�\ ��J����node-gyp/package.jsonnu�[���PK]�\�~!�]]��node-gyp/lib/index.jsnu�[���PK]�\n�r���O�fs/package.jsonnu�[���PK]�\�sY��v�fs/lib/move-file.jsnu�[���PK]�\R]Sm�/�/� fs/lib/cp/polyfill.jsnu�[���PK]�\���<<�2 fs/lib/cp/LICENSEnu�[���PK]�\��?��57 fs/lib/cp/index.jsnu�[���PK]�\`m�XD D .: fs/lib/cp/errors.jsnu�[���PK]�\��� ���G fs/lib/readdir-scoped.jsnu�[���PK]�\I����I fs/lib/index.jsnu�[���PK]�\�L�΄��J fs/lib/with-temp-dir.jsnu�[���PK]�\�i���N 
fs/lib/common/get-options.jsnu�[���PK]�\�#���Q fs/lib/common/node.jsnu�[���PK]�\�r�� R fs/LICENSE.mdnu�[���PK]�\\�dmmsU query/package.jsonnu�[���PK]�\��붾 � "\ query/lib/index.jsnu�[���PK]�\�r�� "} query/LICENSEnu�[���PK]�\R��|��}� run-script/package.jsonnu�[���PK]�\ϖ�& �� run-script/lib/signal-manager.jsnu�[���PK]�\�;{� run-script/lib/run-script.jsnu�[���PK]�\�S�F//!q� run-script/lib/make-spawn-args.jsnu�[���PK]�\���C::� run-script/lib/package-envs.jsnu�[���PK]�\��GI""(y� run-script/lib/node-gyp-bin/node-gyp.cmdnu�[���PK]�\�ଳ33$� run-script/lib/node-gyp-bin/node-gypnu�[���PK]�\n|K=��z� run-script/lib/set-path.jsnu�[���PK]�\E�_��"�� run-script/lib/validate-options.jsnu�[���PK]�\�O#�� �� run-script/lib/run-script-pkg.jsnu�[���PK]�\o<vX��#�� run-script/lib/is-server-package.jsnu�[���PK]�\.9����ή run-script/LICENSEnu�[���PK]�\�`�L��� git/package.jsonnu�[���PK]�\��E2mm�� git/lib/utils.jsnu�[���PK]�\T1�I]]�� git/lib/make-error.jsnu�[���PK]�\�z�IIG� git/lib/clone.jsnu�[���PK]�\�y�<�� �� git/lib/is.jsnu�[���PK]�\�]���� git/lib/spawn.jsnu�[���PK]�\�@�K��<� git/lib/index.jsnu�[���PK]�\��o���n� git/lib/is-clean.jsnu�[���PK]�\U� �CC�� git/lib/errors.jsnu�[���PK]�\�]n���� git/lib/lines-to-revs.jsnu�[���PK]�\��s2SS8� git/lib/which.jsnu�[���PK]�\[�l{<<�� git/lib/find.jsnu�[���PK]�\�D���F� git/lib/revs.jsnu�[���PK]�\���Sss� git/lib/opts.jsnu�[���PK]�\|�q����� git/LICENSEnu�[���PK]�\-�yc��� config/package.jsonnu�[���PK]�\�������� config/lib/nerf-dart.jsnu�[���PK]�\E���I I %� config/lib/definitions/definitions.jsnu�[���PK]�\���""e config/lib/definitions/index.jsnu�[���PK]�\+Jkk$�(config/lib/definitions/definition.jsnu�[���PK]�\?0E!ll:Cconfig/lib/type-defs.jsnu�[���PK]�\���<>>�Hconfig/lib/type-description.jsnu�[���PK]�\|�&-�p�pyKconfig/lib/index.jsnu�[���PK]�\�*J��x�config/lib/umask.jsnu�[���PK]�\Q_z���M�config/lib/env-replace.jsnu�[���PK]�\pm����4�config/lib/errors.jsnu�[���PK]�\[��� � ;�config/lib/set-envs.jsnu�[���PK]�\,�z7�� 
�config/lib/parse-field.jsnu�[���PK]�\.9�����config/LICENSEnu�[���PK]�\eW�)9&9&)�config/README.mdnu�[���PK���8�
/home/emeraadmin/www/node_modules/path-parse/../debug/../xmlbuilder/../../js/../4d695/@npmcli.zip