| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589 |
- var fs = require('fs')
- var path = require('path')
- var { glob } = require('glob')
- var normalizeData = require('normalize-package-data')
- var safeJSON = require('json-parse-even-better-errors')
- var util = require('util')
- var normalizePackageBin = require('npm-normalize-package-bin')
module.exports = readJson

// put more stuff on here to customize.
readJson.extraSet = [
  bundleDependencies,
  gypfile,
  serverjs,
  scriptpath,
  authors,
  readme,
  mans,
  bins,
  githead,
  fillTypes,
]

// package ids ("name@version") that have already been warned about,
// so final() does not log the same warnings twice
var typoWarned = {}
// raw file contents -> fully processed package data (filled by extrasCached)
var cache = {}
// Read and process a package.json file. The middle arguments are
// optional and detected by type: a boolean is the strict flag, a
// function (other than the trailing callback) is the logger.
function readJson (file, log_, strict_, cb_) {
  var log = null
  var strict
  var nArgs = arguments.length
  for (var idx = 1; idx < nArgs - 1; idx++) {
    var arg = arguments[idx]
    if (typeof arg === 'boolean') {
      strict = arg
    } else if (typeof arg === 'function') {
      log = arg
    }
  }
  if (log === null) {
    log = function () {}
  }
  var cb = arguments[nArgs - 1]
  readJson_(file, log, strict, cb)
}
// Read the raw file contents; parseJson handles both the data and any
// read error (including the missing-file fallback to index.js).
function readJson_ (file, log, strict, cb) {
  fs.readFile(file, 'utf8', (er, d) => parseJson(file, er, d, log, strict, cb))
}
// Remove a leading byte order marker. This catches EF BB BF (the UTF-8
// BOM) because the buffer-to-string conversion in `fs.readFileSync()`
// translates it to FEFF, the UTF-16 BOM.
function stripBOM (content) {
  return content.charCodeAt(0) === 0xFEFF ? content.slice(1) : content
}
// Deep-copy JSON-shaped data: null/undefined and primitives pass
// through, arrays and plain objects are cloned recursively.
function jsonClone (obj) {
  if (obj == null) {
    return obj
  }
  if (Array.isArray(obj)) {
    return obj.map(function (item) {
      return jsonClone(item)
    })
  }
  if (typeof obj === 'object') {
    var copy = {}
    Object.keys(obj).forEach(function (key) {
      copy[key] = jsonClone(obj[key])
    })
    return copy
  }
  return obj
}
// Turn raw file contents (or a read error) into processed package data.
// ENOENT falls back to an embedded /**package**/ comment in index.js.
function parseJson (file, er, d, log, strict, cb) {
  if (er && er.code === 'ENOENT') {
    return fs.stat(path.dirname(file), function (err, stat) {
      if (!err && stat && !stat.isDirectory()) {
        // Parent exists but is not a directory.
        // ENOTDIR isn't used on Windows, but npm expects it.
        var notDir = Object.create(er)
        notDir.code = 'ENOTDIR'
        return cb(notDir)
      }
      return indexjs(file, er, log, strict, cb)
    })
  }
  if (er) {
    return cb(er)
  }

  // Serve a clone of previously processed results, keyed by raw contents.
  if (cache[d]) {
    return cb(null, jsonClone(cache[d]))
  }

  var data
  try {
    data = safeJSON(stripBOM(d))
    // Strip registry-internal fields (anything starting with "_").
    for (var key in data) {
      if (/^_/.test(key)) {
        delete data[key]
      }
    }
  } catch (jsonErr) {
    // Not valid JSON: maybe it's an index.js with a package comment.
    data = parseIndex(d)
    if (!data) {
      return cb(parseError(jsonErr, file))
    }
  }
  extrasCached(file, d, data, log, strict, cb)
}
// Run the extras pipeline and, on success, cache a clone of the result
// keyed by the raw file contents.
function extrasCached (file, d, data, log, strict, cb) {
  extras(file, data, log, strict, (err, extrasData) => {
    if (!err) {
      cache[d] = jsonClone(extrasData)
    }
    return cb(err, extrasData)
  })
}
// Fall back to parsing a /**package**/ comment out of index.js when the
// package.json itself was unreadable. `er` is the original error, which
// is what callers see if this fallback fails too.
function indexjs (file, er, log, strict, cb) {
  // Already looking at index.js itself: nothing left to try.
  if (path.basename(file) === 'index.js') {
    return cb(er)
  }

  var indexFile = path.resolve(path.dirname(file), 'index.js')
  fs.readFile(indexFile, 'utf8', function (er2, contents) {
    if (er2) {
      return cb(er)
    }

    if (cache[contents]) {
      // Clone so callers cannot mutate the shared cache entry
      // (consistent with the cache hit in parseJson).
      return cb(null, jsonClone(cache[contents]))
    }

    var parsed = parseIndex(contents)
    if (!parsed) {
      return cb(er)
    }

    extrasCached(file, contents, parsed, log, strict, cb)
  })
}
readJson.extras = extras
// Run every extraSet processor against `data`, then normalize via
// final(). log and strict are optional (detected by type); the last
// argument is always the callback.
function extras (file, data, log_, strict_, cb_) {
  var log = null
  var strict
  for (var i = 2; i < arguments.length - 1; i++) {
    var arg = arguments[i]
    if (typeof arg === 'boolean') {
      strict = arg
    } else if (typeof arg === 'function') {
      log = arg
    }
  }
  if (log === null) {
    log = function () {}
  }
  var cb = arguments[arguments.length - 1]

  var set = readJson.extraSet
  var remaining = set.length
  var errState = null
  set.forEach(function (fn) {
    fn(file, data, then)
  })

  // Countdown: once every processor has reported in (and none has
  // errored), hand off to final(). The first error wins.
  function then (er) {
    if (errState) {
      return
    }
    if (er) {
      errState = er
      return cb(er)
    }
    remaining--
    if (remaining > 0) {
      return
    }
    final(file, data, log, strict, cb)
  }
}
// Strip leading "node_modules/.bin/" segments from script commands and
// drop any non-string script entries (never allowed, only cause problems).
function scriptpath (file, data, cb) {
  if (!data.scripts) {
    return cb(null, data)
  }
  var spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
  Object.keys(data.scripts).forEach(function (key) {
    var value = data.scripts[key]
    if (typeof value !== 'string') {
      delete data.scripts[key]
    } else if (value.match(spre)) {
      data.scripts[key] = value.replace(spre, '')
    }
  })
  cb(null, data)
}
// Per-key script cleaner; invoked with the scripts object as `this`.
function scriptpath_ (key) {
  var value = this[key]
  // Non-string scripts are never allowed, and only cause problems.
  if (typeof value !== 'string') {
    delete this[key]
    return
  }
  var binPrefix = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
  if (binPrefix.test(value)) {
    this[key] = value.replace(binPrefix, '')
  }
}
// If the package ships a *.gyp file, has no install/preinstall script,
// and hasn't opted out via "gypfile": false, default the install script
// to "node-gyp rebuild" (via gypfile_).
function gypfile (file, data, cb) {
  var scripts = data.scripts || {}
  if (scripts.install || scripts.preinstall || data.gypfile === false) {
    return cb(null, data)
  }
  glob('*.gyp', { cwd: path.dirname(file) })
    .then(function (files) {
      return gypfile_(file, data, files, cb)
    })
    .catch(function (er) {
      return cb(er)
    })
}
// Apply the node-gyp default once the *.gyp glob results are known.
function gypfile_ (file, data, files, cb) {
  if (files.length === 0) {
    // No .gyp files: leave the package untouched.
    return cb(null, data)
  }
  var scripts = data.scripts || {}
  scripts.install = 'node-gyp rebuild'
  data.scripts = scripts
  data.gypfile = true
  return cb(null, data)
}
// If a server.js exists next to package.json and there is no start
// script, default it to "node server.js".
function serverjs (file, data, cb) {
  var scripts = data.scripts || {}
  if (scripts.start) {
    return cb(null, data)
  }
  var serverFile = path.join(path.dirname(file), 'server.js')
  fs.access(serverFile, function (err) {
    if (!err) {
      scripts.start = 'node server.js'
      data.scripts = scripts
    }
    return cb(null, data)
  })
}
// Fill data.contributors from a sibling AUTHORS file, unless
// contributors is already set. The AUTHORS file is optional.
function authors (file, data, cb) {
  if (data.contributors) {
    return cb(null, data)
  }
  var authorsFile = path.resolve(path.dirname(file), 'AUTHORS')
  fs.readFile(authorsFile, 'utf8', function (er, contents) {
    // Ignore read errors; we were just checking whether it exists.
    if (er) {
      return cb(null, data)
    }
    authors_(file, data, contents, cb)
  })
}
// Parse AUTHORS contents: one contributor per line, with #-comment
// lines and blank lines removed.
function authors_ (file, data, ad, cb) {
  var lines = ad.split(/\r?\n/g)
  var contributors = []
  for (var i = 0; i < lines.length; i++) {
    var line = lines[i].replace(/^\s*#.*$/, '').trim()
    if (line) {
      contributors.push(line)
    }
  }
  data.contributors = contributors
  return cb(null, data)
}
// Locate a README next to package.json and load it into data.readme /
// data.readmeFilename (via readme_), preferring markdown variants.
function readme (file, data, cb) {
  if (data.readme) {
    return cb(null, data)
  }
  var dir = path.dirname(file)
  var globOpts = { cwd: dir, nocase: true, mark: true }
  glob('{README,README.*}', globOpts)
    .then(function (files) {
      // mark: true suffixes directories with "/"; don't accept those.
      var regular = files.filter(function (f) {
        return !/\/$/.test(f)
      })
      if (!regular.length) {
        return cb()
      }
      var chosen = preferMarkdownReadme(regular)
      return readme_(file, data, path.resolve(dir, chosen), cb)
    })
    .catch(function (er) {
      return cb(er)
    })
}
// Pick the best README candidate: prefer a markdown-ish extension
// (README.md, README.markdown, ...); failing that, a bare README;
// otherwise just the first filename.
function preferMarkdownReadme (files) {
  var markdownRe = /\.m?a?r?k?d?o?w?n?$/i
  var fallback = 0
  for (var i = 0; i < files.length; i++) {
    if (markdownRe.test(files[i])) {
      return files[i]
    }
    if (/README$/.test(files[i])) {
      fallback = i
    }
  }
  return files[fallback]
}
// Read the chosen README file into data; an unreadable file is
// silently skipped (callback invoked with no arguments).
function readme_ (file, data, rm, cb) {
  var readmeFilename = path.basename(rm)
  fs.readFile(rm, 'utf8', function (er, contents) {
    // Maybe not readable, or something.
    if (er) {
      return cb()
    }
    data.readme = contents
    data.readmeFilename = readmeFilename
    return cb(er, data)
  })
}
// Populate data.man from directories.man when no explicit man entry
// exists, using forward slashes regardless of platform.
function mans (file, data, cb) {
  var manDir = data.directories && data.directories.man
  if (data.man || !manDir) {
    return cb(null, data)
  }
  const dirname = path.dirname(file)
  const cwd = path.resolve(dirname, manDir)
  glob('**/*.[0-9]', { cwd })
    .then(function (found) {
      data.man = found.map(function (man) {
        return path.relative(dirname, path.join(cwd, man)).split(path.sep).join('/')
      })
      return cb(null, data)
    })
    .catch(function (er) {
      return cb(er)
    })
}
// Normalize bin entries, then fill data.bin from directories.bin when
// no explicit bin entry exists (via bins_).
function bins (file, data, cb) {
  data = normalizePackageBin(data)
  var binDir = data.directories && data.directories.bin
  if (data.bin || !binDir) {
    return cb(null, data)
  }
  // path.join('/', ...) confines the bin dir underneath the package root.
  binDir = path.resolve(path.dirname(file), path.join('.', path.join('/', binDir)))
  glob('**', { cwd: binDir })
    .then(function (binsGlob) {
      return bins_(file, data, binsGlob, cb)
    })
    .catch(function (er) {
      return cb(er)
    })
}
// Build the bin map from the glob of the bin directory, skipping
// dotfiles, then re-normalize.
function bins_ (file, data, binsGlob, cb) {
  var binDir = (data.directories && data.directories.bin) || '.'
  var bin = {}
  binsGlob.forEach(function (mf) {
    if (mf && mf.charAt(0) !== '.') {
      bin[path.basename(mf)] = path.join(binDir, mf)
    }
  })
  data.bin = bin
  return cb(null, normalizePackageBin(data))
}
// Normalize the bundleDependencies field: fold in the misspelled
// "bundledDependencies", expand `true` to all dependencies, and drop
// `false` or any other non-array value.
function bundleDependencies (file, data, cb) {
  var canonical = 'bundleDependencies'
  var misspelled = 'bundledDependencies'
  // Normalize the key name.
  if (data[misspelled] !== undefined) {
    if (data[canonical] === undefined) {
      data[canonical] = data[misspelled]
    }
    delete data[misspelled]
  }
  var value = data[canonical]
  if (value === false) {
    delete data[canonical]
  } else if (value === true) {
    data[canonical] = Object.keys(data.dependencies || {})
  } else if (value !== undefined && !Array.isArray(value)) {
    delete data[canonical]
  }
  return cb(null, data)
}
// Fill data.gitHead from the nearest .git/HEAD, walking up from the
// package directory toward the filesystem root.
function githead (file, data, cb) {
  if (data.gitHead) {
    return cb(null, data)
  }
  var dir = path.dirname(file)
  var head = path.resolve(dir, '.git/HEAD')
  fs.readFile(head, 'utf8', function (er, headData) {
    if (er) {
      var parent = path.dirname(dir)
      if (parent === dir) {
        // Reached the root without finding a repository.
        return cb(null, data)
      }
      // Retry one level up (dir becomes the "file" whose dirname is
      // taken on the next iteration).
      return githead(dir, data, cb)
    }
    githead_(data, dir, headData, cb)
  })
}
// Resolve the contents of .git/HEAD into a commit sha. A detached HEAD
// holds the sha directly; a symbolic ref ("ref: refs/heads/...") is
// resolved through .git/<ref>, falling back to .git/packed-refs.
function githead_ (data, dir, head, cb) {
  if (!head.match(/^ref: /)) {
    data.gitHead = head.trim()
    return cb(null, data)
  }

  var headRef = head.replace(/^ref: /, '').trim()
  var headFile = path.resolve(dir, '.git', headRef)
  fs.readFile(headFile, 'utf8', function (er, headData) {
    if (er || !headData) {
      // Loose ref missing: look for the sha in packed-refs.
      var packFile = path.resolve(dir, '.git/packed-refs')
      return fs.readFile(packFile, 'utf8', function (readFileErr, refs) {
        if (readFileErr || !refs) {
          return cb(null, data)
        }
        var lines = refs.split('\n')
        for (var i = 0; i < lines.length; i++) {
          var match = lines[i].match(/^([0-9a-f]{40}) (.+)$/)
          if (match && match[2].trim() === headRef) {
            data.gitHead = match[1]
            break
          }
        }
        return cb(null, data)
      })
    }
    data.gitHead = headData.replace(/^ref: /, '').trim()
    return cb(null, data)
  })
}
/**
 * Warn if the bin references don't point to anything. This might be better in
 * normalize-package-data if it had access to the file path.
 */
function checkBinReferences_ (file, data, warn, cb) {
  if (!(data.bin instanceof Object)) {
    return cb()
  }

  var keys = Object.keys(data.bin)
  var keysLeft = keys.length
  if (keysLeft === 0) {
    return cb()
  }

  // Invoked once per bin entry; fires cb after the last one resolves.
  function handleExists (relName, result) {
    keysLeft--
    if (!result) {
      warn('No bin file found at ' + relName)
    }
    if (keysLeft === 0) {
      cb()
    }
  }

  var dirName = path.dirname(file)
  keys.forEach(function (key) {
    var relName = data.bin[key]
    /* istanbul ignore if - impossible, bins have been normalized */
    if (typeof relName !== 'string') {
      warn('Bin filename for ' + key + ' is not a string: ' + util.inspect(relName))
      delete data.bin[key]
      handleExists(relName, true)
      return
    }
    var binPath = path.resolve(dirName, relName)
    fs.stat(binPath, (err) => handleExists(relName, !err))
  })
}
// Last pipeline step: run normalize-package-data, verify bin
// references, and mark this package id so its warnings are not
// repeated on subsequent reads.
function final (file, data, log, strict, cb) {
  var packageId = makePackageId(data)

  // Forward warnings to the logger, but only the first time this
  // package id is processed.
  function warn (msg) {
    if (typoWarned[packageId]) {
      return
    }
    if (log) {
      log('package.json', packageId, msg)
    }
  }

  try {
    normalizeData(data, warn, strict)
  } catch (error) {
    return cb(error)
  }

  checkBinReferences_(file, data, warn, function () {
    typoWarned[packageId] = true
    cb(null, data)
  })
}
// If a .d.ts file named after the main entry point exists, and neither
// "types" nor "typings" is set, fill data.types with its path.
function fillTypes (file, data, cb) {
  var index = data.main || 'index.js'
  if (typeof index !== 'string') {
    return cb(new TypeError('The "main" attribute must be of type string.'))
  }

  // TODO exports is much more complicated than this in verbose format.
  // Conditional exports, arrays of fallbacks, and per-path maps
  // (e.g. { ".": [...], "./package.json": "./package.json" }) are not
  // handled here; only the simple "main" case is covered.

  // ./lib/index.js -> ./lib/index.d.ts
  var basename = path.basename(index, path.extname(index))
  var extless = path.join(path.dirname(index), basename)
  var dts = `./${extless}.d.ts`
  var dtsPath = path.join(path.dirname(file), dts)
  var hasDTSFields = 'types' in data || 'typings' in data
  if (!hasDTSFields && fs.existsSync(dtsPath)) {
    data.types = dts.split(path.sep).join('/')
  }

  cb(null, data)
}
// Build a "name@version" id; missing or non-string pieces become ''.
function makePackageId (data) {
  var name = typeof data.name === 'string' ? data.name.trim() : ''
  var ver = typeof data.version === 'string' ? data.version.trim() : ''
  return name + '@' + ver
}
// Trim a string; anything falsy or non-string collapses to ''.
function cleanString (str) {
  if (typeof str !== 'string' || !str) {
    return ''
  }
  return str.trim()
}
// /**package { "name": "foo", "version": "1.2.3", ... } **/
// Extract an embedded package.json from a /**package ... **/ comment
// block inside an index.js; returns null when no valid block is found.
function parseIndex (data) {
  var parts = data.split(/^\/\*\*package(?:\s|$)/m)
  if (parts.length < 2) {
    return null
  }
  var body = parts[1].split(/\*\*\/$/m)
  if (body.length < 2) {
    return null
  }
  // Strip leading "*" decorations from each comment line.
  var json = body[0].replace(/^\s*\*/mg, '')
  try {
    return safeJSON(json)
  } catch (er) {
    return null
  }
}
// Wrap a JSON parse failure in an npm-style EJSONPARSE error carrying
// the offending file path.
function parseError (ex, file) {
  var er = new Error('Failed to parse json\n' + ex.message)
  er.code = 'EJSONPARSE'
  er.path = file
  return er
}
|