From 488a7ae0463e9b5b71844d2e070c06d173a96e10 Mon Sep 17 00:00:00 2001 From: Lee Byron Date: Thu, 14 Nov 2019 12:31:30 -0800 Subject: [PATCH] [BREAKING] Resolve cached values after batch dispatch --- README.md | 42 ++++++++++++++----- src/__tests__/dataloader.test.js | 72 ++++++++++++++++++-------------- src/__tests__/unhandled.test.js | 31 ++++++++++++++ src/index.js | 33 +++++++++++++-- 4 files changed, 134 insertions(+), 44 deletions(-) create mode 100644 src/__tests__/unhandled.test.js diff --git a/README.md b/README.md index 369e6fa..9fa382e 100644 --- a/README.md +++ b/README.md @@ -140,16 +140,6 @@ DataLoader provides a memoization cache for all loads which occur in a single request to your application. After `.load()` is called once with a given key, the resulting value is cached to eliminate redundant loads. -In addition to relieving pressure on your data storage, caching results per-request -also creates fewer objects which may relieve memory pressure on your application: - -```js -const userLoader = new DataLoader(...) -const promise1A = userLoader.load(1) -const promise1B = userLoader.load(1) -assert(promise1A === promise1B) -``` - #### Caching Per-Request DataLoader caching *does not* replace Redis, Memcache, or any other shared @@ -183,6 +173,38 @@ app.get('/', function(req, res) { app.listen() ``` +#### Caching and Batching + +Subsequent calls to `.load()` with the same key will result in that key not +appearing in the keys provided to your batch function. *However*, the resulting +Promise will still wait on the current batch to complete. This way both cached +and uncached requests will resolve at the same time, allowing DataLoader +optimizations for subsequent dependent loads. + +In the example below, User `1` happens to be cached. However, because User `1` +and `2` are loaded in the same tick, they will resolve at the same time. 
This +means both `user.bestFriendID` loads will also happen in the same tick which +results in two total requests (the same as if User `1` had not been cached). + +```js +userLoader.prime(1, { bestFriendID: 3 }) + +async function getBestFriend(userID) { + const user = await userLoader.load(userID) + return await userLoader.load(user.bestFriendID) +} + +// In one part of your application +getBestFriend(1) + +// Elsewhere +getBestFriend(2) +``` + +Without this optimization, if the cached User `1` resolved immediately, this +could result in three total requests since each `user.bestFriendID` load would +happen at different times. + #### Clearing Cache In certain uncommon cases, clearing the request cache may be necessary. diff --git a/src/__tests__/dataloader.test.js b/src/__tests__/dataloader.test.js index d3845cd..87bb9a8 100644 --- a/src/__tests__/dataloader.test.js +++ b/src/__tests__/dataloader.test.js @@ -91,14 +91,53 @@ describe('Primary API', () => { expect(loadCalls).toEqual([ [ 1, 2 ], [ 3 ] ]); }); + it('batches cached requests', async () => { + const loadCalls = []; + let resolveBatch = () => {}; + const identityLoader = new DataLoader(keys => { + loadCalls.push(keys); + return new Promise(resolve => { + resolveBatch = () => resolve(keys); + }); + }); + + identityLoader.prime(1, 1); + + const promise1 = identityLoader.load(1); + const promise2 = identityLoader.load(2); + + // Track when each resolves. 
+ let promise1Resolved = false; + let promise2Resolved = false; + promise1.then(() => { promise1Resolved = true; }); + promise2.then(() => { promise2Resolved = true; }); + + // Move to next macro-task (tick) + await new Promise(setImmediate); + + expect(promise1Resolved).toBe(false); + expect(promise2Resolved).toBe(false); + + resolveBatch(); + // Move to next macro-task (tick) + await new Promise(setImmediate); + + expect(promise1Resolved).toBe(true); + expect(promise2Resolved).toBe(true); + + const [ value1, value2 ] = await Promise.all([ promise1, promise2 ]); + expect(value1).toBe(1); + expect(value2).toBe(2); + + expect(loadCalls).toEqual([ [ 2 ] ]); + }); + it('coalesces identical requests', async () => { const [ identityLoader, loadCalls ] = idLoader(); const promise1a = identityLoader.load(1); const promise1b = identityLoader.load(1); - expect(promise1a).toBe(promise1b); - const [ value1a, value1b ] = await Promise.all([ promise1a, promise1b ]); expect(value1a).toBe(1); expect(value1b).toBe(1); @@ -374,35 +413,6 @@ describe('Represents Errors', () => { expect(loadCalls).toEqual([]); }); - // TODO: #224 - /* - it('Not catching a primed error is an unhandled rejection', async () => { - let hadUnhandledRejection = false; - function onUnhandledRejection() { - hadUnhandledRejection = true; - } - process.on('unhandledRejection', onUnhandledRejection); - try { - const [ identityLoader ] = idLoader(); - - identityLoader.prime(1, new Error('Error: 1')); - - // Wait a bit. - await new Promise(setImmediate); - - // Ignore result. - identityLoader.load(1); - - // Wait a bit. 
- await new Promise(setImmediate); - - expect(hadUnhandledRejection).toBe(true); - } finally { - process.removeListener('unhandledRejection', onUnhandledRejection); - } - }); - */ - it('Can clear values from cache after errors', async () => { const loadCalls = []; const errorLoader = new DataLoader(keys => { diff --git a/src/__tests__/unhandled.test.js b/src/__tests__/unhandled.test.js new file mode 100644 index 0000000..4b7a8fe --- /dev/null +++ b/src/__tests__/unhandled.test.js @@ -0,0 +1,31 @@ +/** + * Copyright (c) 2019-present, GraphQL Foundation + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow + */ + +const DataLoader = require('..'); + +describe('Unhandled rejections', () => { + it('Not catching a primed error is an unhandled rejection', async () => { + let hadUnhandledRejection = false; + // Override Jest's unhandled detection + global.jasmine.process.removeAllListeners('unhandledRejection'); + global.jasmine.process.on('unhandledRejection', () => { + hadUnhandledRejection = true; + }); + + const identityLoader = new DataLoader(async keys => keys); + + identityLoader.prime(1, new Error('Error: 1')); + + // Ignore result. 
+ identityLoader.load(1); + + await new Promise(resolve => setTimeout(resolve, 10)); + expect(hadUnhandledRejection).toBe(true); + }); +}); diff --git a/src/index.js b/src/index.js index f882212..1927663 100644 --- a/src/index.js +++ b/src/index.js @@ -84,7 +84,10 @@ class DataLoader { if (cache) { var cachedPromise = cache.get(cacheKey); if (cachedPromise) { - return cachedPromise; + var cacheHits = batch.cacheHits || (batch.cacheHits = []); + return new Promise(resolve => { + cacheHits.push(() => resolve(cachedPromise)); + }); } } @@ -234,7 +237,8 @@ type Batch = { callbacks: Array<{ resolve: (value: V) => void; reject: (error: Error) => void; - }> + }>, + cacheHits?: Array<() => void> } // Private: Either returns the current batch, or creates and schedules a @@ -251,7 +255,10 @@ function getCurrentBatch(loader: DataLoader): Batch { if ( existingBatch !== null && !existingBatch.hasDispatched && - (maxBatchSize === 0 || existingBatch.keys.length < maxBatchSize) + (maxBatchSize === 0 || + (existingBatch.keys.length < maxBatchSize && + (!existingBatch.cacheHits || + existingBatch.cacheHits.length < maxBatchSize))) ) { return existingBatch; } @@ -275,6 +282,12 @@ function dispatchBatch( // Mark this batch as having been dispatched. batch.hasDispatched = true; + // If there's nothing to load, resolve any cache hits and return early. + if (batch.keys.length === 0) { + resolveCacheHits(batch); + return; + } + // Call the provided batchLoadFn for this loader with the batch's keys and // with the loader as the `this` context. var batchPromise = loader._batchLoadFn(batch.keys); @@ -310,6 +323,9 @@ function dispatchBatch( ); } + // First resolve all cache hits. + resolveCacheHits(batch); + // Step through values, resolving or rejecting each Promise in the batch. for (var i = 0; i < batch.callbacks.length; i++) { var value = values[i]; @@ -329,12 +345,23 @@ function failedDispatch( batch: Batch, error: Error ) { + // Cache hits are resolved, even though the batch failed. 
+ resolveCacheHits(batch); for (var i = 0; i < batch.keys.length; i++) { loader.clear(batch.keys[i]); batch.callbacks[i].reject(error); } } +// Private: Resolves the Promises for any cache hits in this batch. +function resolveCacheHits(batch: Batch) { + if (batch.cacheHits) { + for (var i = 0; i < batch.cacheHits.length; i++) { + batch.cacheHits[i](); + } + } +} + // Private: produce a cache key for a given key (and options) function getCacheKey( options: ?Options,