// generate-pnp-map.js — 459 lines (365 loc) · 16.6 KB
// @flow
import type Config from '../config.js';
import type WorkspaceLayout from '../workspace-layout.js';
import type PackageResolver from '../package-resolver.js';
import type Reporter from '../reporters/base-reporter.js';
import pnpApi from './generate-pnp-map-api.tpl.js';
import * as fs from './fs.js';
const crypto = require('crypto');
const invariant = require('invariant');
const path = require('path');
// Used to turn Windows backslash paths into portable forward-slash paths
const backwardSlashRegExp = /\\/g;
// Extension appended to offline-mirror entries (they are stored as zip archives)
const OFFLINE_CACHE_EXTENSION = `.zip`;
// Where a given package reference lives on disk, plus the exact reference that
// each of its dependencies resolves to
type PackageInformation = {|
  packageLocation: string,
  packageDependencies: Map<string, string>,
|};
// reference -> information; the `null` reference is used for the top-level package
type PackageInformationStore = Map<string | null, PackageInformation>;
// package name -> store; the `null` name is used for the top-level package
type PackageInformationStores = Map<string | null, PackageInformationStore>;
// Options threaded from generatePnpMap down to getPackageInformationStores
type GeneratePnpMapOptions = {|
  resolver: PackageResolver,
  reporter: Reporter,
  targetPath: string,
  workspaceLayout: ?WorkspaceLayout,
|};
function generateMaps(packageInformationStores: PackageInformationStores, blacklistedLocations: Set<string>): string {
let code = ``;
// Bake the information stores into our generated code
code += `let packageInformationStores = new Map([\n`;
for (const [packageName, packageInformationStore] of packageInformationStores) {
code += ` [${JSON.stringify(packageName)}, new Map([\n`;
for (const [packageReference, {packageLocation, packageDependencies}] of packageInformationStore) {
code += ` [${JSON.stringify(packageReference)}, {\n`;
code += ` packageLocation: path.resolve(__dirname, ${JSON.stringify(packageLocation)}),\n`;
code += ` packageDependencies: new Map([\n`;
for (const [dependencyName, dependencyReference] of packageDependencies.entries()) {
code += ` [${JSON.stringify(dependencyName)}, ${JSON.stringify(dependencyReference)}],\n`;
}
code += ` ]),\n`;
code += ` }],\n`;
}
code += ` ])],\n`;
}
code += `]);\n`;
code += `\n`;
// Also bake an inverse map that will allow us to find the package information based on the path
code += `let locatorsByLocations = new Map([\n`;
for (const blacklistedLocation of blacklistedLocations) {
code += ` [${JSON.stringify(blacklistedLocation)}, blacklistedLocator],\n`;
}
for (const [packageName, packageInformationStore] of packageInformationStores) {
for (const [packageReference, {packageLocation}] of packageInformationStore) {
if (packageName !== null) {
code += ` [${JSON.stringify(packageLocation)}, ${JSON.stringify({
name: packageName,
reference: packageReference,
})}],\n`;
} else {
code += ` [${JSON.stringify(packageLocation)}, topLevelLocator],\n`;
}
}
}
code += `]);\n`;
return code;
}
function generateFindPackageLocator(packageInformationStores: PackageInformationStores): string {
let code = ``;
// We get the list of each string length we'll need to check in order to find the current package context
const lengths = new Map();
for (const packageInformationStore of packageInformationStores.values()) {
for (const {packageLocation} of packageInformationStore.values()) {
if (packageLocation === null) {
continue;
}
const length = packageLocation.length;
const count = (lengths.get(length) || 0) + 1;
lengths.set(length, count);
}
}
// We must try the larger lengths before the smaller ones, because smaller ones might also match the longest ones
// (for instance, /project/path will match /project/path/.pnp/global/node_modules/pnp-cf5f9c17b8f8db)
const sortedLengths = Array.from(lengths.entries()).sort((a, b) => {
return b[0] - a[0];
});
// Generate a function that, given a file path, returns the associated package name
code += `exports.findPackageLocator = function findPackageLocator(location) {\n`;
code += ` let relativeLocation = normalizePath(path.relative(__dirname, location));\n`;
code += `\n`;
code += ` if (!relativeLocation.match(isStrictRegExp))\n`;
code += ` relativeLocation = \`./\${relativeLocation}\`;\n`;
code += `\n`;
code += ` if (location.match(isDirRegExp) && relativeLocation.charAt(relativeLocation.length - 1) !== '/')\n`;
code += ` relativeLocation = \`\${relativeLocation}/\`;\n`;
code += `\n`;
code += ` let match;\n`;
for (const [length] of sortedLengths) {
code += `\n`;
code += ` if (relativeLocation.length >= ${length} && relativeLocation[${length - 1}] === '/')\n`;
code += ` if (match = locatorsByLocations.get(relativeLocation.substr(0, ${length})))\n`;
code += ` return blacklistCheck(match);\n`;
}
code += `\n`;
code += ` return null;\n`;
code += `};\n`;
return code;
}
/**
 * Walks the resolved dependency graph and builds, for every package, the data
 * the PnP runtime needs: where the package lives on disk and the exact
 * reference each of its dependencies resolves to.
 *
 * Returns a tuple of [packageInformationStores, blacklistedLocations], where
 * the blacklist holds the physical locations of virtual (peer-dependency)
 * packages that must not be required through their real path.
 *
 * NOTE(review): this function has filesystem side effects — it creates the
 * symlinks under `.pnp/workspaces` and `.pnp/externals` used for virtual
 * packages.
 */
async function getPackageInformationStores(
  config: Config,
  seedPatterns: Array<string>,
  {resolver, reporter, targetPath, workspaceLayout}: GeneratePnpMapOptions,
): Promise<[PackageInformationStores, Set<string>]> {
  // All generated paths are made relative to the directory holding the PnP file
  const targetDirectory = path.dirname(targetPath);
  const offlineCacheFolder = config.offlineCacheFolder;
  const packageInformationStores: PackageInformationStores = new Map();
  const blacklistedLocations: Set<string> = new Set();

  // Returns the path of `fsPath` relative to the cache folder, or null when it
  // lives outside the cache (eg. a workspace)
  const getCachePath = (fsPath: string) => {
    const cacheRelativePath = normalizePath(path.relative(config.cacheFolder, fsPath));
    // if fsPath is not inside the cache folder, we just skip it
    if (cacheRelativePath.match(/^\.\.\//)) {
      return null;
    }
    return cacheRelativePath;
  };

  // When an offline mirror is configured, rewrites a cache path into the
  // equivalent path inside the mirror archive (`<entry>.zip/<internal path>`)
  const resolveOfflineCacheFolder = (fsPath: string) => {
    if (!offlineCacheFolder) {
      return fsPath;
    }
    const cacheRelativePath = getCachePath(fsPath);
    // if fsPath is not inside the cache, we shouldn't replace it (workspace)
    if (!cacheRelativePath) {
      return fsPath;
    }
    const components = cacheRelativePath.split(/\//g);
    const [cacheEntry, ...internalPath] = components;
    return path.resolve(offlineCacheFolder, `${cacheEntry}${OFFLINE_CACHE_EXTENSION}`, internalPath.join('/'));
  };

  // Normalizes Windows backslashes to forward slashes so the generated file is portable
  const normalizePath = (fsPath: string) => {
    return process.platform === 'win32' ? fsPath.replace(backwardSlashRegExp, '/') : fsPath;
  };

  // Makes a directory path relative to the PnP file location, always prefixed
  // with ./ (or ../) and always ending with a trailing slash
  const normalizeDirectoryPath = (fsPath: string) => {
    let relativePath = normalizePath(path.relative(targetDirectory, resolveOfflineCacheFolder(fsPath)));
    if (!relativePath.match(/^\.{0,2}\//)) {
      relativePath = `./${relativePath}`;
    }
    return relativePath.replace(/\/?$/, '/');
  };

  // Stable sha1 over an ordered list of strings; used to derive virtual package names
  const getHashFrom = (data: Array<string>) => {
    const hashGenerator = crypto.createHash('sha1');
    for (const datum of data) {
      hashGenerator.update(datum);
    }
    return hashGenerator.digest('hex');
  };

  // Resolves a pattern to its {pkg, ref, loc} triple, or null when the package
  // has no reference or no on-disk location
  const getResolverEntry = pattern => {
    const pkg = resolver.getStrictResolvedPattern(pattern);
    const ref = pkg._reference;
    if (!ref) {
      return null;
    }
    invariant(ref.locations.length <= 1, 'Must have at most one location (usually in the cache)');
    const loc = ref.locations[0];
    if (!loc) {
      return null;
    }
    return {pkg, ref, loc};
  };

  // Registers `seedPatterns` (and, recursively, their own dependencies) into
  // the stores; returns the name -> reference resolutions computed at this level
  const visit = async (
    precomputedResolutions: Map<string, string>,
    seedPatterns: Array<string>,
    parentData: Array<string> = [],
  ) => {
    const resolutions = new Map(precomputedResolutions);
    const locations = new Map();

    // This first pass will compute the package reference of each of the given patterns
    // They will usually be the package version, but not always. We need to do this in a pre-process pass, because the
    // dependencies might depend on one another, so if we need to replace one of them, we need to compute it first
    for (const pattern of seedPatterns) {
      const entry = getResolverEntry(pattern);
      if (!entry) {
        continue;
      }
      const {pkg, ref} = entry;
      let {loc} = entry;
      const packageName = pkg.name;
      let packageReference = pkg.version;

      // If we have peer dependencies, then we generate a new virtual reference based on the parent one
      // We cannot generate this reference based on what those peer references resolve to, because they might not have
      // been computed yet (for example, consider the case where A has a peer dependency on B, and B a peer dependency
      // on A; it's valid, but it prevents us from computing A and B - and it's even worse with 3+ packages involved)
      const peerDependencies = new Set(Array.from(Object.keys(pkg.peerDependencies || {})));

      // As an optimization, we only setup virtual packages if their underlying packages are referenced multiple times
      // in the tree. This allow us to avoid having to create symlinks in the majority of cases
      if (peerDependencies.size > 0 && ref.requests.length > 1) {
        const hash = getHashFrom([...parentData, packageName, packageReference]);
        let symlinkSource;
        let symlinkFile;
        switch (ref.remote.type) {
          case 'workspace':
            {
              symlinkSource = loc;
              symlinkFile = path.resolve(config.lockfileFolder, '.pnp', 'workspaces', `pnp-${hash}`, packageName);
              loc = symlinkFile;
            }
            break;
          default:
            {
              const isFromCache = getCachePath(loc);
              const hashName =
                isFromCache && offlineCacheFolder ? `pnp-${hash}${OFFLINE_CACHE_EXTENSION}` : `pnp-${hash}`;
              const newLoc = path.resolve(
                config.lockfileFolder,
                '.pnp',
                'externals',
                hashName,
                'node_modules',
                packageName,
              );
              // The `node_modules/<pkgName>` part is already there when the package comes from the cache
              if (isFromCache) {
                // Climb out of `node_modules/<pkgName>` (scoped names add one extra segment)
                const getBase = source => path.resolve(source, '../'.repeat(1 + packageName.split('/').length));
                symlinkSource = resolveOfflineCacheFolder(getBase(loc));
                symlinkFile = getBase(newLoc);
              } else {
                symlinkSource = loc;
                symlinkFile = newLoc;
              }
              loc = newLoc;
            }
            break;
        }
        await fs.mkdirp(path.dirname(symlinkFile));
        await fs.symlink(symlinkSource, symlinkFile);
        packageReference = `pnp:${hash}`;

        // We blacklist this path so that we can print a nicer error message if someone tries to require it (it usually
        // means that they're using realpath on the return value of require.resolve)
        blacklistedLocations.add(normalizeDirectoryPath(loc));
      }

      // Now that we have the final reference, we need to store it
      resolutions.set(packageName, packageReference);
      locations.set(packageName, loc);
    }

    // Now that we have the final references, we can start the main loop, which will insert the packages into the store
    // if they aren't already there, and recurse over their own children
    for (const pattern of seedPatterns) {
      const entry = getResolverEntry(pattern);
      if (!entry) {
        continue;
      }
      const {pkg, ref} = entry;
      const packageName = pkg.name;
      const packageReference = resolutions.get(packageName);
      invariant(packageReference, `Package reference should have been computed during the pre-pass`);
      const loc = locations.get(packageName);
      invariant(loc, `Package location should have been computed during the pre-pass`);

      // We can early exit if the package is already registered with the exact same name and reference, since even if
      // we might get slightly different dependencies (depending on how things were optimized), both sets are valid
      let packageInformationStore = packageInformationStores.get(packageName);
      if (!packageInformationStore) {
        packageInformationStore = new Map();
        packageInformationStores.set(packageName, packageInformationStore);
      }
      let packageInformation = packageInformationStore.get(packageReference);
      if (packageInformation) {
        continue;
      }
      packageInformation = {
        packageLocation: normalizeDirectoryPath(loc),
        packageDependencies: new Map(),
      };

      // Split the dependencies between direct/peer - we will only recurse on the former
      const peerDependencies = new Set(Array.from(Object.keys(pkg.peerDependencies || {})));
      const directDependencies = ref.dependencies.filter(pattern => {
        const pkg = resolver.getStrictResolvedPattern(pattern);
        return !pkg || !peerDependencies.has(pkg.name);
      });

      // We inject the partial information in the store right now so that we won't cycle indefinitely
      packageInformationStore.set(packageReference, packageInformation);

      // We must inject the peer dependencies before iterating; one of our dependencies might have a peer dependency
      // on one of our peer dependencies, so it must be available from the start (we don't have to do that for direct
      // dependencies, because the "visit" function that will iterate over them will automatically add them to the
      // candidate resolutions as part of the first step, cf above)
      for (const dependencyName of peerDependencies) {
        const dependencyReference = resolutions.get(dependencyName);
        if (dependencyReference) {
          packageInformation.packageDependencies.set(dependencyName, dependencyReference);
        }
      }
      const childResolutions = await visit(packageInformation.packageDependencies, directDependencies, [
        packageName,
        packageReference,
      ]);

      // We can now inject into our package the resolutions we got from the visit function
      for (const [name, reference] of childResolutions.entries()) {
        packageInformation.packageDependencies.set(name, reference);
      }

      // Finally, unless a package depends on a previous version of itself (that would be weird but correct...), we
      // inject them an implicit dependency to themselves (so that they can require themselves)
      if (!packageInformation.packageDependencies.has(packageName)) {
        packageInformation.packageDependencies.set(packageName, packageReference);
      }
    }
    return resolutions;
  };

  // If we have workspaces, we need to iterate over them all in order to add them to the map
  // This is because they might not be declared as dependencies of the top-level project (and with reason, since the
  // top-level package might depend on a different version than the one provided in the workspaces - cf Babel, which
  // depends on an old version of itself in order to compile itself)
  if (workspaceLayout) {
    for (const name of Object.keys(workspaceLayout.workspaces)) {
      const pkg = workspaceLayout.workspaces[name].manifest;
      // Skip the aggregator, since it's essentially a duplicate of the top-level package that we'll iterate later on
      if (pkg.workspaces) {
        continue;
      }
      const ref = pkg._reference;
      invariant(ref, `Workspaces should have a reference`);
      invariant(ref.locations.length === 1, `Workspaces should have exactly one location`);
      const loc = ref.locations[0];
      invariant(loc, `Workspaces should have a location`);
      let packageInformationStore = packageInformationStores.get(name);
      if (!packageInformationStore) {
        packageInformationStore = new Map();
        packageInformationStores.set(name, packageInformationStore);
      }
      packageInformationStore.set(pkg.version, {
        packageLocation: normalizeDirectoryPath(loc),
        packageDependencies: await visit(new Map(), ref.dependencies, [name, pkg.version]),
      });
    }
  }

  // Register the top-level package in our map
  // This will recurse on each of its dependencies as well.
  packageInformationStores.set(
    null,
    new Map([
      [
        null,
        {
          packageLocation: normalizeDirectoryPath(config.lockfileFolder),
          packageDependencies: await visit(new Map(), seedPatterns),
        },
      ],
    ]),
  );
  return [packageInformationStores, blacklistedLocations];
}
export async function generatePnpMap(
config: Config,
seedPatterns: Array<string>,
{resolver, reporter, workspaceLayout, targetPath}: GeneratePnpMapOptions,
): Promise<string> {
const [packageInformationStores, blacklistedLocations] = await getPackageInformationStores(config, seedPatterns, {
resolver,
reporter,
targetPath,
workspaceLayout,
});
const setupStaticTables = [
generateMaps(packageInformationStores, blacklistedLocations),
generateFindPackageLocator(packageInformationStores),
].join(``);
return pnpApi
.replace(/\$\$SHEBANG/g, config.plugnplayShebang)
.replace(/\$\$BLACKLIST/g, JSON.stringify(config.plugnplayBlacklist))
.replace(/\$\$SETUP_STATIC_TABLES\(\);/g, setupStaticTables);
}