[BREAKING] Resolve cached values after batch dispatch
leebyron committed Nov 15, 2019
1 parent b5d7bf5 commit fdbb624
Showing 4 changed files with 176 additions and 44 deletions.
42 changes: 32 additions & 10 deletions README.md
@@ -140,16 +140,6 @@ DataLoader provides a memoization cache for all loads which occur in a single
request to your application. After `.load()` is called once with a given key,
the resulting value is cached to eliminate redundant loads.

In addition to relieving pressure on your data storage, caching results per request also creates fewer objects, which may relieve memory pressure on your application:

```js
const userLoader = new DataLoader(...)
const promise1A = userLoader.load(1)
const promise1B = userLoader.load(1)
assert(promise1A === promise1B)
```

#### Caching Per-Request

DataLoader caching *does not* replace Redis, Memcache, or any other shared
@@ -183,6 +173,38 @@ app.get('/', function(req, res) {
app.listen()
```
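
For context, a per-request setup might look like the following sketch (the Express app and the `batchGetUsers` batch function are hypothetical stand-ins for the parts elided from the example above):

```js
const express = require('express')
const DataLoader = require('dataloader')

// Hypothetical batch function: fetch many users in one round trip.
async function batchGetUsers(ids) {
  return ids.map(id => ({ id }))
}

const app = express()

app.get('/', function(req, res) {
  // A new loader per request: its cache lives exactly as long as the request.
  const userLoader = new DataLoader(batchGetUsers)
  userLoader.load(1).then(user => res.json(user))
})

app.listen(3000)
```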

#### Caching and Batching

Subsequent calls to `.load()` with a key that has already been cached will not
include that key in the keys provided to your batch function. *However*, the
resulting Promise will still wait on the current batch to complete before
resolving. This way both cached and uncached requests resolve at the same time,
so DataLoader's batching optimizations also apply to subsequent dependent loads.

In the example below, User `1` happens to be cached. However, because User `1`
and `2` are loaded in the same tick, they will resolve at the same time. This
means both `user.bestFriendID` loads will also happen in the same tick, which
results in two total requests (the same as if User `1` had not been cached).

```js
userLoader.prime(1, { bestFriendID: 3 })

async function getBestFriend(userID) {
  const user = await userLoader.load(userID)
  return await userLoader.load(user.bestFriendID)
}

// In one part of your application
getBestFriend(1)

// Elsewhere
getBestFriend(2)
```

Without this optimization, if the cached User `1` resolved immediately, this
could result in three total requests, since each `user.bestFriendID` load would
happen at a different time.
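
To make the request count concrete, here is a minimal sketch (the in-memory `fakeUsers` table and the `batchCalls` counter are illustrative assumptions, not part of the library). Running it performs exactly two batch loads:

```js
const DataLoader = require('dataloader')

// Hypothetical in-memory data, standing in for a real backend.
const fakeUsers = {
  1: { id: 1, bestFriendID: 3 },
  2: { id: 2, bestFriendID: 4 },
  3: { id: 3 },
  4: { id: 4 },
}

let batchCalls = 0
const userLoader = new DataLoader(async keys => {
  batchCalls++ // count round trips to the "backend"
  return keys.map(id => fakeUsers[id])
})

userLoader.prime(1, fakeUsers[1])

async function getBestFriend(userID) {
  const user = await userLoader.load(userID)
  return userLoader.load(user.bestFriendID)
}

Promise.all([ getBestFriend(1), getBestFriend(2) ]).then(() => {
  console.log(batchCalls) // 2: one batch for [ 2 ], one for [ 3, 4 ]
})
```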

#### Clearing Cache

In certain uncommon cases, clearing the request cache may be necessary.
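For instance, using the `userLoader` from above (a brief sketch; `clear` and `clearAll` are existing DataLoader methods):

```js
// Evict a single key, e.g. after a mutation changes User 1:
userLoader.clear(1)

// Or drop every cached value for this request:
userLoader.clearAll()
```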
114 changes: 83 additions & 31 deletions src/__tests__/dataloader.test.js
@@ -105,14 +105,95 @@ describe('Primary API', () => {
    expect(loadCalls).toEqual([ [ 1, 2 ], [ 3 ] ]);
  });

  it('batches cached requests', async () => {
    const loadCalls = [];
    let resolveBatch = () => {};
    const identityLoader = new DataLoader<number, number>(keys => {
      loadCalls.push(keys);
      return new Promise(resolve => {
        resolveBatch = () => resolve(keys);
      });
    });

    identityLoader.prime(1, 1);

    const promise1 = identityLoader.load(1);
    const promise2 = identityLoader.load(2);

    // Track when each resolves.
    let promise1Resolved = false;
    let promise2Resolved = false;
    promise1.then(() => { promise1Resolved = true; });
    promise2.then(() => { promise2Resolved = true; });

    // Move to next macro-task (tick)
    await new Promise(setImmediate);

    expect(promise1Resolved).toBe(false);
    expect(promise2Resolved).toBe(false);

    resolveBatch();
    // Move to next macro-task (tick)
    await new Promise(setImmediate);

    expect(promise1Resolved).toBe(true);
    expect(promise2Resolved).toBe(true);

    const [ value1, value2 ] = await Promise.all([ promise1, promise2 ]);
    expect(value1).toBe(1);
    expect(value2).toBe(2);

    expect(loadCalls).toEqual([ [ 2 ] ]);
  });

  it('max batch size respects cached results', async () => {
    const loadCalls = [];
    let resolveBatch = () => {};
    const identityLoader = new DataLoader<number, number>(keys => {
      loadCalls.push(keys);
      return new Promise(resolve => {
        resolveBatch = () => resolve(keys);
      });
    }, { maxBatchSize: 1 });

    identityLoader.prime(1, 1);

    const promise1 = identityLoader.load(1);
    const promise2 = identityLoader.load(2);

    // Track when each resolves.
    let promise1Resolved = false;
    let promise2Resolved = false;
    promise1.then(() => { promise1Resolved = true; });
    promise2.then(() => { promise2Resolved = true; });

    // Move to next macro-task (tick)
    await new Promise(setImmediate);

    // Promise 1 resolves first since max batch size is 1
    expect(promise1Resolved).toBe(true);
    expect(promise2Resolved).toBe(false);

    resolveBatch();
    // Move to next macro-task (tick)
    await new Promise(setImmediate);

    expect(promise1Resolved).toBe(true);
    expect(promise2Resolved).toBe(true);

    const [ value1, value2 ] = await Promise.all([ promise1, promise2 ]);
    expect(value1).toBe(1);
    expect(value2).toBe(2);

    expect(loadCalls).toEqual([ [ 2 ] ]);
  });

  it('coalesces identical requests', async () => {
    const [ identityLoader, loadCalls ] = idLoader<number>();

    const promise1a = identityLoader.load(1);
    const promise1b = identityLoader.load(1);

    expect(promise1a).toBe(promise1b);

    const [ value1a, value1b ] = await Promise.all([ promise1a, promise1b ]);
    expect(value1a).toBe(1);
    expect(value1b).toBe(1);
@@ -388,35 +469,6 @@ describe('Represents Errors', () => {
    expect(loadCalls).toEqual([]);
  });

  // TODO: #224
  /*
  it('Not catching a primed error is an unhandled rejection', async () => {
    let hadUnhandledRejection = false;
    function onUnhandledRejection() {
      hadUnhandledRejection = true;
    }
    process.on('unhandledRejection', onUnhandledRejection);
    try {
      const [ identityLoader ] = idLoader<number>();
      identityLoader.prime(1, new Error('Error: 1'));
      // Wait a bit.
      await new Promise(setImmediate);
      // Ignore result.
      identityLoader.load(1);
      // Wait a bit.
      await new Promise(setImmediate);
      expect(hadUnhandledRejection).toBe(true);
    } finally {
      process.removeListener('unhandledRejection', onUnhandledRejection);
    }
  });
  */

  it('Can clear values from cache after errors', async () => {
    const loadCalls = [];
    const errorLoader = new DataLoader(keys => {
31 changes: 31 additions & 0 deletions src/__tests__/unhandled.test.js
@@ -0,0 +1,31 @@
/**
 * Copyright (c) 2019-present, GraphQL Foundation
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

const DataLoader = require('..');

describe('Unhandled rejections', () => {
  it('Not catching a primed error is an unhandled rejection', async () => {
    let hadUnhandledRejection = false;
    // Override Jest's unhandled detection
    global.jasmine.process.removeAllListeners('unhandledRejection');
    global.jasmine.process.on('unhandledRejection', () => {
      hadUnhandledRejection = true;
    });

    const identityLoader = new DataLoader<number, number>(async keys => keys);

    identityLoader.prime(1, new Error('Error: 1'));

    // Ignore result.
    identityLoader.load(1);

    await new Promise(resolve => setTimeout(resolve, 10));
    expect(hadUnhandledRejection).toBe(true);
  });
});
33 changes: 30 additions & 3 deletions src/index.js
@@ -84,7 +84,10 @@ class DataLoader<K, V, C = K> {
    if (cache) {
      var cachedPromise = cache.get(cacheKey);
      if (cachedPromise) {
        return cachedPromise;
        var cacheHits = batch.cacheHits || (batch.cacheHits = []);
        return new Promise(resolve => {
          cacheHits.push(() => resolve(cachedPromise));
        });
      }
    }

@@ -241,7 +244,8 @@ type Batch<K, V> = {
  callbacks: Array<{
    resolve: (value: V) => void;
    reject: (error: Error) => void;
  }>
  }>,
  cacheHits?: Array<() => void>
}

// Private: Either returns the current batch, or creates and schedules a
@@ -258,7 +262,10 @@ function getCurrentBatch<K, V>(loader: DataLoader<K, V, any>): Batch<K, V> {
  if (
    existingBatch !== null &&
    !existingBatch.hasDispatched &&
    (maxBatchSize === 0 || existingBatch.keys.length < maxBatchSize)
    (maxBatchSize === 0 ||
      (existingBatch.keys.length < maxBatchSize &&
        (!existingBatch.cacheHits ||
          existingBatch.cacheHits.length < maxBatchSize)))
  ) {
    return existingBatch;
  }
@@ -282,6 +289,12 @@ function dispatchBatch<K, V>(
  // Mark this batch as having been dispatched.
  batch.hasDispatched = true;

  // If there's nothing to load, resolve any cache hits and return early.
  if (batch.keys.length === 0) {
    resolveCacheHits(batch);
    return;
  }

  // Call the provided batchLoadFn for this loader with the batch's keys and
  // with the loader as the `this` context.
  var batchPromise = loader._batchLoadFn(batch.keys);
@@ -317,6 +330,9 @@
      );
    }

    // Resolve all cache hits in the same micro-task as freshly loaded values.
    resolveCacheHits(batch);

    // Step through values, resolving or rejecting each Promise in the batch.
    for (var i = 0; i < batch.callbacks.length; i++) {
      var value = values[i];
@@ -336,12 +352,23 @@ function failedDispatch<K, V>(
  batch: Batch<K, V>,
  error: Error
) {
  // Cache hits are resolved, even though the batch failed.
  resolveCacheHits(batch);
  for (var i = 0; i < batch.keys.length; i++) {
    loader.clear(batch.keys[i]);
    batch.callbacks[i].reject(error);
  }
}

// Private: Resolves the Promises for any cache hits in this batch.
function resolveCacheHits(batch: Batch<any, any>) {
  if (batch.cacheHits) {
    for (var i = 0; i < batch.cacheHits.length; i++) {
      batch.cacheHits[i]();
    }
  }
}

// Private: produce a cache key for a given key (and options)
function getCacheKey<K, V, C>(
  options: ?Options<K, V, C>,