/
build-ideal-tree.js
1771 lines (1575 loc) · 67.5 KB
/
build-ideal-tree.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
// mixin implementing the buildIdealTree method
const rpj = require('read-package-json-fast')
const npa = require('npm-package-arg')
const pacote = require('pacote')
const cacache = require('cacache')
const semver = require('semver')
const promiseCallLimit = require('promise-call-limit')
const getPeerSet = require('../peer-set.js')
const realpath = require('../../lib/realpath.js')
const { resolve } = require('path')
const { promisify } = require('util')
const treeCheck = require('../tree-check.js')
const readdir = promisify(require('readdir-scoped-modules'))
const debug = require('../debug.js')
const fromPath = require('../from-path.js')
const calcDepFlags = require('../calc-dep-flags.js')
const Shrinkwrap = require('../shrinkwrap.js')
const Node = require('../node.js')
const Link = require('../link.js')
const addRmPkgDeps = require('../add-rm-pkg-deps.js')
const gatherDepSet = require('../gather-dep-set.js')
const optionalSet = require('../optional-set.js')
const {checkEngine, checkPlatform} = require('npm-install-checks')
// enum of return values for canPlaceDep.
// No, this is a conflict, you may not put that package here
const CONFLICT = Symbol('CONFLICT')
// Yes, this is fine, and should not be a problem
const OK = Symbol('OK')
// No need, because the package already here is fine
const KEEP = Symbol('KEEP')
// Yes, clobber the package that is already here
const REPLACE = Symbol('REPLACE')
const relpath = require('../relpath.js')
// note: some of these symbols are shared so we can hit
// them with unit tests and reuse them across mixins
const _complete = Symbol('complete')
const _depsSeen = Symbol('depsSeen')
const _depsQueue = Symbol('depsQueue')
const _currentDep = Symbol('currentDep')
const _updateAll = Symbol('updateAll')
const _mutateTree = Symbol('mutateTree')
const _flagsSuspect = Symbol.for('flagsSuspect')
const _prune = Symbol('prune')
const _preferDedupe = Symbol('preferDedupe')
const _legacyBundling = Symbol('legacyBundling')
const _parseSettings = Symbol('parseSettings')
const _initTree = Symbol('initTree')
const _applyUserRequests = Symbol('applyUserRequests')
const _inflateAncientLockfile = Symbol('inflateAncientLockfile')
const _buildDeps = Symbol('buildDeps')
const _buildDepStep = Symbol('buildDepStep')
const _nodeFromEdge = Symbol('nodeFromEdge')
const _nodeFromSpec = Symbol('nodeFromSpec')
const _fetchManifest = Symbol('fetchManifest')
const _problemEdges = Symbol('problemEdges')
const _manifests = Symbol('manifests')
const _loadWorkspaces = Symbol.for('loadWorkspaces')
const _linkFromSpec = Symbol('linkFromSpec')
const _loadPeerSet = Symbol('loadPeerSet')
const _updateNames = Symbol.for('updateNames')
const _placeDep = Symbol.for('placeDep')
const _canPlaceDep = Symbol.for('canPlaceDep')
const _canPlacePeers = Symbol('canPlacePeers')
const _pruneForReplacement = Symbol('pruneForReplacement')
const _fixDepFlags = Symbol('fixDepFlags')
const _resolveLinks = Symbol('resolveLinks')
const _rootNodeFromPackage = Symbol('rootNodeFromPackage')
const _add = Symbol('add')
const _resolvedAdd = Symbol.for('resolvedAdd')
const _queueNamedUpdates = Symbol('queueNamedUpdates')
const _queueVulnDependents = Symbol('queueVulnDependents')
const _avoidRange = Symbol('avoidRange')
const _shouldUpdateNode = Symbol('shouldUpdateNode')
const resetDepFlags = require('../reset-dep-flags.js')
const _loadFailures = Symbol('loadFailures')
const _pruneFailedOptional = Symbol('pruneFailedOptional')
const _linkNodes = Symbol('linkNodes')
const _follow = Symbol('follow')
const _globalStyle = Symbol('globalStyle')
const _globalRootNode = Symbol('globalRootNode')
const _isVulnerable = Symbol.for('isVulnerable')
const _usePackageLock = Symbol.for('usePackageLock')
const _rpcache = Symbol.for('realpathCache')
const _stcache = Symbol.for('statCache')
const _updateFilePath = Symbol('updateFilePath')
const _followSymlinkPath = Symbol('followSymlinkPath')
const _getRelpathSpec = Symbol('getRelpathSpec')
const _retrieveSpecName = Symbol('retrieveSpecName')
const _strictPeerDeps = Symbol('strictPeerDeps')
const _checkEngineAndPlatform = Symbol('checkEngineAndPlatform')
const _checkEngine = Symbol('checkEngine')
const _checkPlatform = Symbol('checkPlatform')
const _virtualRoots = Symbol('virtualRoots')
const _virtualRoot = Symbol('virtualRoot')
// used for the ERESOLVE error to show the last peer conflict encountered
const _peerConflict = Symbol('peerConflict')
const _failPeerConflict = Symbol('failPeerConflict')
const _explainPeerConflict = Symbol('explainPeerConflict')
const _warnPeerConflict = Symbol('warnPeerConflict')
const _edgesOverridden = Symbol('edgesOverridden')
// exposed symbol for unit testing the placeDep method directly
const _peerSetSource = Symbol.for('peerSetSource')
// used by Reify mixin
const _force = Symbol.for('force')
const _explicitRequests = Symbol.for('explicitRequests')
const _global = Symbol.for('global')
const _idealTreePrune = Symbol.for('idealTreePrune')
module.exports = cls => class IdealTreeBuilder extends cls {
constructor (options) {
  super(options)

  // normalize trailing slash: strip any number of them, then add one back
  const registry = options.registry || 'https://registry.npmjs.org'
  options.registry = this.registry = registry.replace(/\/+$/, '') + '/'

  const {
    idealTree = null,
    global = false,
    follow = false,
    globalStyle = false,
    legacyPeerDeps = false,
    force = false,
    packageLock = true,
    strictPeerDeps = false,
  } = options

  this[_force] = !!force
  this[_strictPeerDeps] = !!strictPeerDeps
  this.idealTree = idealTree
  this.legacyPeerDeps = legacyPeerDeps
  this[_usePackageLock] = packageLock
  this[_global] = !!global
  // global installs always imply the "global style" layout
  this[_globalStyle] = this[_global] || globalStyle
  this[_follow] = !!follow

  // names the user explicitly asked to add/rm/update on this run
  this[_explicitRequests] = new Set()
  this[_preferDedupe] = false
  this[_legacyBundling] = false
  // work queue state for the buildDeps walk
  this[_depsSeen] = new Set()
  this[_depsQueue] = []
  this[_currentDep] = null
  this[_updateNames] = []
  this[_updateAll] = false
  this[_mutateTree] = false
  this[_loadFailures] = new Set()
  this[_linkNodes] = new Set()
  // cache of fetched manifests, keyed by spec
  this[_manifests] = new Map()
  this[_peerConflict] = null
  this[_edgesOverridden] = new Set()

  // a map of each module in a peer set to the thing that depended on
  // that set of peers in the first place. Use a WeakMap so that we
  // don't hold onto references for nodes that are garbage collected.
  this[_peerSetSource] = new WeakMap()
  this[_virtualRoots] = new Map()
}
get explicitRequests () {
return new Set(this[_explicitRequests])
}
// public method
async buildIdealTree (options = {}) {
if (this.idealTree)
return Promise.resolve(this.idealTree)
// allow the user to set reify options on the ctor as well.
// XXX: deprecate separate reify() options object.
options = { ...this.options, ...options }
// an empty array or any falsey value is the same as null
if (!options.add || options.add.length === 0)
options.add = null
if (!options.rm || options.rm.length === 0)
options.rm = null
process.emit('time', 'idealTree')
if (!options.add && !options.rm && !options.update && this[_global]) {
const er = new Error('global requires add, rm, or update option')
return Promise.reject(er)
}
// first get the virtual tree, if possible. If there's a lockfile, then
// that defines the ideal tree, unless the root package.json is not
// satisfied by what the ideal tree provides.
// from there, we start adding nodes to it to satisfy the deps requested
// by the package.json in the root.
this[_parseSettings](options)
// start tracker block
this.addTracker('idealTree')
try {
await this[_initTree]()
await this[_applyUserRequests](options)
await this[_inflateAncientLockfile]()
await this[_buildDeps]()
await this[_fixDepFlags]()
await this[_pruneFailedOptional]()
await this[_checkEngineAndPlatform]()
} finally {
process.emit('timeEnd', 'idealTree')
this.finishTracker('idealTree')
}
return treeCheck(this.idealTree)
}
[_checkEngineAndPlatform] () {
// engine/platform checks throw, so start the promise chain off first
return Promise.resolve()
.then(() => {
for (const node of this.idealTree.inventory.values()) {
if (!node.optional) {
this[_checkEngine](node)
this[_checkPlatform](node)
}
}
})
}
[_checkPlatform] (node) {
checkPlatform(node.package, this[_force])
}
[_checkEngine] (node) {
const { engineStrict, npmVersion, nodeVersion } = this.options
const c = () => checkEngine(node.package, npmVersion, nodeVersion, this[_force])
if (engineStrict)
c()
else {
try {
c()
} catch (er) {
this.log.warn(er.code, er.message, {
package: er.pkgid,
required: er.required,
current: er.current,
})
}
}
}
[_parseSettings] (options) {
const update = options.update === true ? { all: true }
: Array.isArray(options.update) ? { names: options.update }
: options.update || {}
if (update.all || !Array.isArray(update.names))
update.names = []
this[_complete] = !!options.complete
this[_preferDedupe] = !!options.preferDedupe
this[_legacyBundling] = !!options.legacyBundling
this[_updateNames] = update.names
this[_updateAll] = update.all
// we prune by default unless explicitly set to boolean false
this[_prune] = options.prune !== false
// set if we add anything, but also set here if we know we'll make
// changes and thus have to maybe prune later.
this[_mutateTree] = !!(
options.add ||
options.rm ||
update.all ||
update.names.length
)
}
// load the initial tree, either the virtualTree from a shrinkwrap,
// or just the root node from a package.json
// Resolves after this.idealTree is set and this.virtualTree is cleared.
[_initTree] () {
  process.emit('time', 'idealTree:init')
  return (
    this[_global] ? this[_globalRootNode]()
    // a missing/unreadable package.json is treated as an empty one,
    // but a syntactically broken one is a hard error the user must fix
    : rpj(this.path + '/package.json').then(
      pkg => this[_rootNodeFromPackage](pkg),
      er => {
        if (er.code === 'EJSONPARSE')
          throw er
        return this[_rootNodeFromPackage]({})
      }
    ))
    .then(root => this[_loadWorkspaces](root))
    // ok to not have a virtual tree. probably initial install.
    // When updating all, we load the shrinkwrap, but don't bother
    // to build out the full virtual tree from it, since we'll be
    // reconstructing it anyway.
    .then(root => this[_global] ? root
      : !this[_usePackageLock] || this[_updateAll]
        ? Shrinkwrap.reset({ path: this.path })
          .then(meta => Object.assign(root, {meta}))
        : this.loadVirtual({ root }))

    // if we don't have a lockfile to go from, then start with the
    // actual tree, so we only make the minimum required changes.
    // don't do this for global installs or updates, because in those
    // cases we don't use a lockfile anyway.
    // Load on a new Arborist object, so the Nodes aren't the same,
    // or else it'll get super confusing when we change them!
    .then(async root => {
      if (!this[_updateAll] && !this[_global] && !root.meta.loadedFromDisk) {
        await new this.constructor(this.options).loadActual({ root })
        const tree = root.target || root
        // even though we didn't load it from a package-lock.json FILE,
        // we still loaded it "from disk", meaning we have to reset
        // dep flags before assuming that any mutations were reflected.
        if (tree.children.size)
          root.meta.loadedFromDisk = true
      }
      return root
    })
    .then(tree => {
      // null the virtual tree, because we're about to hack away at it
      // if you want another one, load another copy.
      this.idealTree = tree
      this.virtualTree = null
      process.emit('timeEnd', 'idealTree:init')
    })
}
async [_globalRootNode] () {
const root = await this[_rootNodeFromPackage]({ dependencies: {} })
// this is a gross kludge to handle the fact that we don't save
// metadata on the root node in global installs, because the "root"
// node is something like /usr/local/lib.
const meta = new Shrinkwrap({ path: this.path })
meta.reset()
root.meta = meta
return root
}
async [_rootNodeFromPackage] (pkg) {
// if the path doesn't exist, then we explode at this point. Note that
// this is not a problem for reify(), since it creates the root path
// before ever loading trees.
// TODO: make buildIdealTree() and loadActual handle a missing root path,
// or a symlink to a missing target, and let reify() create it as needed.
const real = await realpath(this.path, this[_rpcache], this[_stcache])
const Cls = real === this.path ? Node : Link
const root = new Cls({
path: this.path,
realpath: real,
pkg,
extraneous: false,
dev: false,
devOptional: false,
peer: false,
optional: false,
global: this[_global],
legacyPeerDeps: this.legacyPeerDeps,
})
if (root.isLink) {
root.target = new Node({
path: real,
realpath: real,
pkg,
extraneous: false,
dev: false,
devOptional: false,
peer: false,
optional: false,
global: this[_global],
legacyPeerDeps: this.legacyPeerDeps,
root,
})
}
return root
}
// process the add/rm requests by modifying the root node, and the
// update.names request by queueing nodes dependent on those named.
async [_applyUserRequests] (options) {
  process.emit('time', 'idealTree:userRequests')
  const tree = this.idealTree.target || this.idealTree

  // If we have a list of package names to update, and we know it's
  // going to update them wherever they are, add any paths into those
  // named nodes to the buildIdealTree queue.
  if (!this[_global] && this[_updateNames].length)
    this[_queueNamedUpdates]()

  // global updates only update the globalTop nodes, but we need to know
  // that they're there, and not reinstall the world unnecessarily.
  if (this[_global] && (this[_updateAll] || this[_updateNames].length)) {
    const nm = resolve(this.path, 'node_modules')
    // an unreadable/missing node_modules dir is treated as empty
    for (const name of await readdir(nm).catch(() => [])) {
      if (this[_updateNames].includes(name))
        this[_explicitRequests].add(name)

      tree.package.dependencies = tree.package.dependencies || {}
      // depend on '*' so whatever is installed there is acceptable
      if (this[_updateAll] || this[_updateNames].includes(name))
        tree.package.dependencies[name] = '*'
    }
  }

  if (this.auditReport && this.auditReport.size > 0)
    this[_queueVulnDependents](options)

  if (options.rm && options.rm.length) {
    addRmPkgDeps.rm(tree.package, options.rm)
    for (const name of options.rm)
      this[_explicitRequests].add(name)
  }

  if (options.add)
    await this[_add](options)

  // triggers a refresh of all edgesOut (the package setter rebuilds edges)
  if (options.add && options.add.length || options.rm && options.rm.length ||
      this[_global])
    tree.package = tree.package

  process.emit('timeEnd', 'idealTree:userRequests')
}
// This returns a promise because we might not have the name yet,
// and need to call pacote.manifest to find the name.
[_add] ({add, saveType = null, saveBundle = false}) {
// get the name for each of the specs in the list.
// ie, doing `foo@bar` we just return foo
// but if it's a url or git, we don't know the name until we
// fetch it and look in its manifest.
return Promise.all(add.map(rawSpec => {
// We do NOT provide the path here, because user-additions need
// to be resolved relative to the CWD the user is in.
return this[_retrieveSpecName](npa(rawSpec))
.then(add => this[_updateFilePath](add))
.then(add => this[_followSymlinkPath](add))
})).then(add => {
this[_resolvedAdd] = add
// now add is a list of spec objects with names.
// find a home for each of them!
const tree = this.idealTree.target || this.idealTree
addRmPkgDeps.add({
pkg: tree.package,
add,
saveBundle,
saveType,
path: this.path,
})
for (const spec of add)
this[_explicitRequests].add(spec.name)
})
}
async [_retrieveSpecName] (spec) {
// if it's just @'' then we reload whatever's there, or get latest
// if it's an explicit tag, we need to install that specific tag version
const isTag = spec.rawSpec && spec.type === 'tag'
if (spec.name && !isTag)
return spec
const mani = await pacote.manifest(spec, { ...this.options })
// if it's a tag type, then we need to run it down to an actual version
if (isTag)
return npa(`${mani.name}@${mani.version}`)
spec.name = mani.name
return spec
}
async [_updateFilePath] (spec) {
if (spec.type === 'file')
spec = this[_getRelpathSpec](spec, spec.fetchSpec)
return spec
}
async [_followSymlinkPath] (spec) {
if (spec.type === 'directory') {
const real = await (
realpath(spec.fetchSpec, this[_rpcache], this[_stcache])
// TODO: create synthetic test case to simulate realpath failure
.catch(/* istanbul ignore next */() => null)
)
spec = this[_getRelpathSpec](spec, real)
}
return spec
}
[_getRelpathSpec] (spec, filepath) {
/* istanbul ignore else - should also be covered by realpath failure */
if (filepath) {
const { name } = spec
const tree = this.idealTree.target || this.idealTree
spec = npa(`file:${relpath(tree.path, filepath)}`, tree.path)
spec.name = name
}
return spec
}
// TODO: provide a way to fix bundled deps by exposing metadata about
// what's in the bundle at each published manifest. Without that, we
// can't possibly fix bundled deps without breaking a ton of other stuff,
// and leaving the user subject to getting it overwritten later anyway.
// Queues the dependents of every vulnerable node for re-evaluation, and
// (with --force) pushes fix versions onto options.add for top vulns.
[_queueVulnDependents] (options) {
  for (const {nodes} of this.auditReport.values()) {
    for (const node of nodes) {
      const bundler = node.getBundler()

      // XXX this belongs in the audit report itself, not here.
      // We shouldn't even get these things here, and they shouldn't
      // be printed by npm-audit-report as if they can be fixed, because
      // they can't.
      if (bundler) {
        this.log.warn(`audit fix ${node.name}@${node.version}`,
          `${node.location}\nis a bundled dependency of\n${
            bundler.name}@${bundler.version} at ${bundler.location}\n` +
          'It cannot be fixed automatically.\n' +
          `Check for updates to the ${bundler.name} package.`)
        continue
      }

      // re-evaluate everything that depends on a vulnerable node, so a
      // fixed version can be placed for each of them
      for (const edge of node.edgesIn) {
        this.addTracker('idealTree', edge.from.name, edge.from.location)
        this[_depsQueue].push(edge.from)
      }
    }
  }

  // note any that can't be fixed at the root level without --force
  // if there's a fix, we use that. otherwise, the user has to remove it,
  // find a different thing, fix the upstream, etc.
  //
  // XXX: how to handle top nodes that aren't the root? Maybe the report
  // just tells the user to cd into that directory and fix it?
  if (this[_force] && this.auditReport && this.auditReport.topVulns.size) {
    options.add = options.add || []
    options.rm = options.rm || []
    for (const [name, topVuln] of this.auditReport.topVulns.entries()) {
      const {
        simpleRange,
        topNodes,
        fixAvailable,
      } = topVuln
      for (const node of topNodes) {
        if (node !== this.idealTree && node !== this.idealTree.target) {
          // not something we're going to fix, sorry. have to cd into
          // that directory and fix it yourself.
          this.log.warn('audit', 'Manual fix required in linked project ' +
            `at ./${node.location} for ${name}@${simpleRange}.\n` +
            `'cd ./${node.location}' and run 'npm audit' for details.`)
          continue
        }

        if (!fixAvailable) {
          this.log.warn('audit', `No fix available for ${name}@${simpleRange}`)
          continue
        }

        // warn before forcing a breaking upgrade onto the add list
        const { isSemVerMajor, version } = fixAvailable
        const breakingMessage = isSemVerMajor
          ? 'a SemVer major change'
          : 'outside your stated dependency range'
        this.log.warn('audit', `Updating ${name} to ${version},` +
          `which is ${breakingMessage}.`)

        options.add.push(`${name}@${version}`)
      }
    }
  }
}
[_isVulnerable] (node) {
return this.auditReport && this.auditReport.isVulnerable(node)
}
[_avoidRange] (name) {
if (!this.auditReport)
return null
const vuln = this.auditReport.get(name)
if (!vuln)
return null
return vuln.range
}
[_queueNamedUpdates] () {
// ignore top nodes, since they are not loaded the same way, and
// probably have their own project associated with them.
// for every node with one of the names on the list, we add its
// dependents to the queue to be evaluated. in buildDepStem,
// anything on the update names list will get refreshed, even if
// it isn't a problem.
// XXX this could be faster by doing a series of inventory.query('name')
// calls rather than walking over everything in the tree.
const set = this.idealTree.inventory
.filter(n => this[_shouldUpdateNode](n))
for (const node of set) {
for (const edge of node.edgesIn) {
this.addTracker('idealTree', edge.from.name, edge.from.location)
this[_depsQueue].push(edge.from)
}
}
}
[_shouldUpdateNode] (node) {
return this[_updateNames].includes(node.name) &&
!node.isTop &&
!node.inDepBundle &&
!node.inShrinkwrap
}
async [_inflateAncientLockfile] () {
const { meta, inventory } = this.idealTree
const ancient = meta.ancientLockfile
const old = meta.loadedFromDisk && !(meta.originalLockfileVersion >= 2)
if (inventory.size === 0 || !ancient && !(old && this[_complete]))
return
// if the lockfile is from node v5 or earlier, then we'll have to reload
// all the manifests of everything we encounter. this is costly, but at
// least it's just a one-time hit.
process.emit('time', 'idealTree:inflate')
const heading = ancient ? 'ancient lockfile' : 'old lockfile'
this.log.warn(heading,
`
The ${meta.type} file was created with an old version of npm,
so supplemental metadata must be fetched from the registry.
This is a one-time fix-up, please be patient...
`)
this.addTracker('idealTree:inflate')
const queue = []
for (const node of inventory.values()) {
if (node.isProjectRoot)
continue
queue.push(async () => {
this.log.silly('inflate', node.location)
const { resolved, version, path, name, location, integrity } = node
// don't try to hit the registry for linked deps
const useResolved = !version ||
resolved && resolved.startsWith('file:')
const id = useResolved ? resolved : version
const spec = npa.resolve(name, id, path)
const sloc = location.substr('node_modules/'.length)
const t = `idealTree:inflate:${sloc}`
this.addTracker(t)
await pacote.manifest(spec, {
...this.options,
resolved: resolved,
integrity: integrity,
fullMetadata: false,
}).then(mani => {
node.package = { ...mani, _id: `${mani.name}@${mani.version}` }
}).catch((er) => {
const warning = `Could not fetch metadata for ${name}@${id}`
this.log.warn(heading, warning, er)
})
this.finishTracker(t)
})
}
await promiseCallLimit(queue)
// have to re-calc dep flags, because the nodes don't have edges
// until their packages get assigned, so everything looks extraneous
calcDepFlags(this.idealTree)
// yes, yes, this isn't the "original" version, but now that it's been
// upgraded, we need to make sure we don't do the work to upgrade it
// again, since it's now as new as can be.
meta.originalLockfileVersion = 2
this.finishTracker('idealTree:inflate')
process.emit('timeEnd', 'idealTree:inflate')
}
// at this point we have a virtual tree with the actual root node's
// package deps, which may be partly or entirely incomplete, invalid
// or extraneous.
[_buildDeps] () {
process.emit('time', 'idealTree:buildDeps')
const tree = this.idealTree.target || this.idealTree
this[_depsQueue].push(tree)
this.log.silly('idealTree', 'buildDeps')
this.addTracker('idealTree', tree.name, '')
return this[_buildDepStep]()
.then(() => process.emit('timeEnd', 'idealTree:buildDeps'))
}
// One step of the tree-building walk: pull the shallowest node off the
// deps queue, resolve and place nodes for each of its problem edges,
// queue any newly-placed nodes, then recurse until the queue is empty.
async [_buildDepStep] () {
  // removes tracker of previous dependency in the queue
  if (this[_currentDep]) {
    const { location, name } = this[_currentDep]
    process.emit('timeEnd', `idealTree:${location || '#root'}`)
    this.finishTracker('idealTree', name, location)
    this[_currentDep] = null
  }

  // queue drained: finish up by resolving link targets
  if (!this[_depsQueue].length)
    return this[_resolveLinks]()

  // sort physically shallower deps up to the front of the queue,
  // because they'll affect things deeper in, then alphabetical
  this[_depsQueue].sort((a, b) =>
    (a.depth - b.depth) || a.path.localeCompare(b.path))

  const node = this[_depsQueue].shift()
  const bd = node.package.bundleDependencies
  const hasBundle = bd && Array.isArray(bd) && bd.length
  const { hasShrinkwrap } = node

  // if the node was already visited, or has since been removed from the
  // tree, skip over it and process the rest of the queue. If a node has
  // a shrinkwrap, also skip it, because it's going to get its deps
  // satisfied by whatever's in that file anyway.
  if (this[_depsSeen].has(node) ||
      node.root !== this.idealTree ||
      hasShrinkwrap && !this[_complete])
    return this[_buildDepStep]()

  this[_depsSeen].add(node)
  this[_currentDep] = node
  process.emit('time', `idealTree:${node.location || '#root'}`)

  // if we're loading a _complete_ ideal tree, for a --package-lock-only
  // installation for example, we have to crack open the tarball and
  // look inside if it has bundle deps or shrinkwraps. note that this is
  // not necessary during a reification, because we just update the
  // ideal tree by reading bundles/shrinkwraps in place.
  // Don't bother if the node is from the actual tree and hasn't
  // been resolved, because we can't fetch it anyway, could be anything!
  const crackOpen = this[_complete] &&
    node !== this.idealTree &&
    node.resolved &&
    (hasBundle || hasShrinkwrap)
  if (crackOpen) {
    const Arborist = this.constructor
    const opt = { ...this.options }
    // extract into a temp dir, then load the bundle/shrinkwrap contents
    // from there onto this node
    await cacache.tmp.withTmp(this.cache, opt, async path => {
      await pacote.extract(node.resolved, path, opt)

      if (hasShrinkwrap) {
        await new Arborist({ ...this.options, path })
          .loadVirtual({ root: node })
      }

      if (hasBundle) {
        await new Arborist({ ...this.options, path })
          .loadActual({ root: node, ignoreMissing: true })
      }
    })
  }

  // if any deps are missing or invalid, then we fetch the manifest for
  // the thing we want, and build a new dep node from that.
  // Then, find the ideal placement for that node. The ideal placement
  // searches from the node's deps (or parent deps in the case of non-root
  // peer deps), and walks up the tree until it finds the highest spot
  // where it doesn't cause any conflicts.
  //
  // A conflict can be:
  // - A node by that name already exists at that location.
  // - The parent has a peer dep on that name
  // - One of the node's peer deps conflicts at that location, unless the
  //   peer dep is met by a node at that location, which is fine.
  //
  // If we create a new node, then build its ideal deps as well.
  //
  // Note: this is the same "maximally naive" deduping tree-building
  // algorithm that npm has used since v3. In a case like this:
  //
  // root -> (a@1, b@1||2)
  // a -> (b@1)
  //
  // You'll end up with a tree like this:
  //
  // root
  // +-- a@1
  // |   +-- b@1
  // +-- b@2
  //
  // rather than this, more deduped, but just as correct tree:
  //
  // root
  // +-- a@1
  // +-- b@1
  //
  // Another way to look at it is that this algorithm favors getting higher
  // version deps at higher levels in the tree, even if that reduces
  // potential deduplication.
  //
  // Set `preferDedupe: true` in the options to replace the shallower
  // dep if allowed.
  const tasks = []
  const peerSource = this[_peerSetSource].get(node) || node
  for (const edge of this[_problemEdges](node)) {
    // edges overridden by an earlier conflict resolution are settled
    if (this[_edgesOverridden].has(edge))
      continue

    // peerSetSource is only relevant when we have a peerEntryEdge
    // otherwise we're setting regular non-peer deps as if they have
    // a virtual root of whatever brought in THIS node.
    // so we VR the node itself if the edge is not a peer
    const source = edge.peer ? peerSource : node
    const virtualRoot = this[_virtualRoot](source, true)
    // reuse virtual root if we already have one, but don't
    // try to do the override ahead of time, since we MAY be able
    // to create a more correct tree than the virtual root could.
    const vrEdge = virtualRoot && virtualRoot.edgesOut.get(edge.name)
    const vrDep = vrEdge && vrEdge.valid && vrEdge.to
    // only re-use the virtualRoot if it's a peer edge we're placing.
    // otherwise, we end up in situations where we override peer deps that
    // we could have otherwise found homes for. Eg:
    // xy -> (x, y)
    // x -> PEER(z@1)
    // y -> PEER(z@2)
    // If xy is a dependency, we can resolve this like:
    // project
    // +-- xy
    // |   +-- y
    // |   +-- z@2
    // +-- x
    // +-- z@1
    // But if x and y are loaded in the same virtual root, then they will
    // be forced to agree on a version of z.
    const required = new Set([edge.from])
    const parent = edge.peer ? virtualRoot : null
    const dep = vrDep && vrDep.satisfies(edge) ? vrDep
      : await this[_nodeFromEdge](edge, parent, null, required)

    /* istanbul ignore next */
    debug(() => {
      if (!dep)
        throw new Error('no dep??')
    })

    tasks.push({edge, dep})
  }

  // place in stable (alphabetical) order for deterministic trees
  const placed = tasks
    .sort((a, b) => a.edge.name.localeCompare(b.edge.name))
    .map(({ edge, dep }) => this[_placeDep](dep, node, edge))

  const promises = []
  for (const set of placed) {
    for (const node of set) {
      this[_mutateTree] = true
      this.addTracker('idealTree', node.name, node.location)
      this[_depsQueue].push(node)

      // we're certainly going to need these soon, fetch them asap
      // if it fails at this point, though, dont' worry because it
      // may well be an optional dep that has gone missing. it'll
      // fail later anyway.
      const from = fromPath(node)
      promises.push(...this[_problemEdges](node).map(e =>
        this[_fetchManifest](npa.resolve(e.name, e.spec, from))
          .catch(er => null)))
    }
  }
  await Promise.all(promises)

  // remember link targets so their deps get resolved at the end
  for (const { to } of node.edgesOut.values()) {
    if (to && to.isLink)
      this[_linkNodes].add(to)
  }

  return this[_buildDepStep]()
}
// loads a node from an edge, and then loads its peer deps (and their
// peer deps, on down the line) into a virtual root parent.
// Returns a Promise for the loaded node (or a Link, if a nesting loop
// was detected), via the recursive _loadPeerSet call at the end.
async [_nodeFromEdge] (edge, parent_, secondEdge, required) {
  // create a virtual root node with the same deps as the node that
  // is requesting this one, so that we can get all the peer deps in
  // a context where they're likely to be resolvable.
  // Note that the virtual root will also have virtual copies of the
  // targets of any child Links, so that they resolve appropriately.
  const parent = parent_ || this[_virtualRoot](edge.from)
  // peer deps are placed beside the dependent (in its resolveParent),
  // rather than under it, so the dependent and its peers are siblings
  const realParent = edge.peer ? edge.from.resolveParent : edge.from
  const spec = npa.resolve(edge.name, edge.spec, edge.from.path)
  const first = await this[_nodeFromSpec](edge.name, spec, parent, edge)

  // we might have a case where the parent has a peer dependency on
  // `foo@*` which resolves to v2, but another dep in the set has a
  // peerDependency on `foo@1`. In that case, if we force it to be v2,
  // we're unnecessarily triggering an ERESOLVE.
  // If we have a second edge to worry about, and it's not satisfied
  // by the first node, try a second and see if that satisfies the
  // original edge here.
  const spec2 = secondEdge && npa.resolve(
    edge.name,
    secondEdge.spec,
    secondEdge.from.path
  )
  // only fetch an alternative when the second edge exists and is not
  // already satisfied by what we resolved for the first edge
  const second = secondEdge && !secondEdge.valid
    ? await this[_nodeFromSpec](edge.name, spec2, parent, secondEdge)
    : null

  // pick the second one if they're both happy with that, otherwise first
  const node = second && edge.valid ? second : first
  // ensure the one we want is the one that's placed
  node.parent = parent

  // the new node is "required" only if it was reached through a
  // non-peerOptional edge whose origin is itself in the required set
  if (required.has(edge.from) && edge.type !== 'peerOptional' ||
      secondEdge && (
        required.has(secondEdge.from) && secondEdge.type !== 'peerOptional'))
    required.add(node)

  // handle otherwise unresolvable dependency nesting loops by
  // creating a symbolic link
  // a1 -> b1 -> a2 -> b2 -> a1 -> ...
  // instead of nesting forever, when the loop occurs, create
  // a symbolic link to the earlier instance
  // (top nodes are excluded: linking to a top node is not a nesting loop)
  for (let p = edge.from.resolveParent; p; p = p.resolveParent) {
    if (p.matches(node) && !p.isTop)
      return new Link({ parent: realParent, target: p })
  }

  // keep track of the thing that caused this node to be included.
  const src = parent.sourceReference
  this[_peerSetSource].set(node, src)

  // load the node's peer deps (recursively) into the same virtual root
  return this[_loadPeerSet](node, required)
}
[_virtualRoot] (node, reuse = false) {
if (reuse && this[_virtualRoots].has(node))
return this[_virtualRoots].get(node)
const vr = new Node({
path: node.realpath,
sourceReference: node,
legacyPeerDeps: this.legacyPeerDeps,
})
// also need to set up any targets from any link deps, so that
// they are properly reflected in the virtual environment
for (const child of node.children.values()) {
if (child.isLink) {
new Node({
path: child.realpath,
sourceReference: child.target,
root: vr,
})
}
}
this[_virtualRoots].set(node, vr)
return vr
}
[_problemEdges] (node) {
// skip over any bundled deps, they're not our problem.
// Note that this WILL fetch bundled meta-deps which are also dependencies
// but not listed as bundled deps. When reifying, we first unpack any
// nodes that have bundleDependencies, then do a loadActual on them, move
// the nodes into the ideal tree, and then prune. So, fetching those
// possibly-bundled meta-deps at this point doesn't cause any worse
// problems than a few unnecessary packument fetches.
// also skip over any nodes in the tree that failed to load, since those
// will crash the install later on anyway.
const bd = node.isProjectRoot ? null : node.package.bundleDependencies
const bundled = new Set(bd || [])
return [...node.edgesOut.values()]
.filter(edge => {
// If it's included in a bundle, we take whatever is specified.
if (bundled.has(edge.name))
return false
// If it's already been logged as a load failure, skip it.
if (edge.to && this[_loadFailures].has(edge.to))
return false
// If it's shrinkwrapped, we use what the shrinkwap wants.
if (edge.to && edge.to.inShrinkwrap)
return false
// If the edge has no destination, that's a problem, unless
// if it's peerOptional and not explicitly requested.
if (!edge.to) {
return edge.type !== 'peerOptional' ||
this[_explicitRequests].has(edge.name)
}
// If the edge has an error, there's a problem.
if (!edge.valid)
return true