[BREAKING] Resolve cached values after batch dispatch

leebyron committed Nov 14, 2019
1 parent 5649bff commit 5f7d2d7
Showing 3 changed files with 103 additions and 15 deletions.
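
The breaking change: `.load()` no longer returns the identical Promise object for a key that is already cached. Instead it returns a new Promise that resolves only after the current batch has dispatched. A minimal sketch of the observable difference, assuming the published `dataloader` package:

```js
const DataLoader = require('dataloader')

const userLoader = new DataLoader(async keys => keys.map(id => ({ id })))

const promise1A = userLoader.load(1)
const promise1B = userLoader.load(1)

// Before this commit the second call returned the cached Promise directly,
// so promise1A === promise1B held. Both promises still resolve to the same
// value, but identity is no longer guaranteed:
console.log(promise1A === promise1B) // false
```
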
42 changes: 32 additions & 10 deletions README.md
@@ -140,16 +140,6 @@ DataLoader provides a memoization cache for all loads which occur in a single
request to your application. After `.load()` is called once with a given key,
the resulting value is cached to eliminate redundant loads.

-In addition to relieving pressure on your data storage, caching results per-request
-also creates fewer objects which may relieve memory pressure on your application:
-
-```js
-const userLoader = new DataLoader(...)
-const promise1A = userLoader.load(1)
-const promise1B = userLoader.load(1)
-assert(promise1A === promise1B)
-```
-
#### Caching Per-Request

DataLoader caching *does not* replace Redis, Memcache, or any other shared
@@ -183,6 +173,38 @@ app.get('/', function(req, res) {
app.listen()
```

#### Caching and Batching

Subsequent calls to `.load()` with a key that is already cached will result in
that key not appearing in the keys provided to your batch function. *However*,
the resulting Promise will still wait on the current batch to complete. This
way both cached and uncached requests resolve at the same time, enabling
DataLoader's optimizations for subsequent dependent loads.

In the example below, User `1` happens to be cached. However, because Users `1`
and `2` are loaded in the same tick, they will resolve at the same time. This
means both `user.bestFriendID` loads will also happen in the same tick, which
results in two total requests (the same as if User `1` had not been cached).

```js
userLoader.prime(1, { bestFriendID: 3 })

async function getBestFriend(userID) {
const user = await userLoader.load(userID)
return await userLoader.load(user.bestFriendID)
}

// In one part of your application
getBestFriend(1)

// Elsewhere
getBestFriend(2)
```

Without this optimization, the cached User `1` would resolve immediately, so
its `user.bestFriendID` load would happen in an earlier tick than User `2`'s.
That could result in three total requests instead of two.
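
To see the two-request behavior concretely, here is a sketch reusing `getBestFriend` from the example above, run inside an async context. The `loadCalls` array and the resolver body (including the `bestFriendID: id + 2` values) are hypothetical, added only to observe batching:

```js
const loadCalls = []
const userLoader = new DataLoader(async keys => {
  loadCalls.push(keys)
  return keys.map(id => ({ id, bestFriendID: id + 2 }))
})

userLoader.prime(1, { bestFriendID: 3 })
await Promise.all([ getBestFriend(1), getBestFriend(2) ])

// User 1 was a cache hit, yet it resolved together with the batch that
// loaded User 2, so both bestFriendID loads joined a single second batch:
// loadCalls is [ [ 2 ], [ 3, 4 ] ] (two requests in total).
```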

#### Clearing Cache

In certain uncommon cases, clearing the request cache may be necessary.
43 changes: 41 additions & 2 deletions src/__tests__/dataloader.test.js
@@ -91,14 +91,53 @@ describe('Primary API', () => {
expect(loadCalls).toEqual([ [ 1, 2 ], [ 3 ] ]);
});

it('batches cached requests', async () => {
const loadCalls = [];
let resolveBatch = () => {};
const identityLoader = new DataLoader<number, number>(keys => {
loadCalls.push(keys);
return new Promise(resolve => {
resolveBatch = () => resolve(keys);
});
});

identityLoader.prime(1, 1);

const promise1 = identityLoader.load(1);
const promise2 = identityLoader.load(2);

// Track when each resolves.
let promise1Resolved = false;
let promise2Resolved = false;
promise1.then(() => { promise1Resolved = true; });
promise2.then(() => { promise2Resolved = true; });

// Move to next macro-task (tick)
await new Promise(setImmediate);

expect(promise1Resolved).toBe(false);
expect(promise2Resolved).toBe(false);

resolveBatch();
// Move to next macro-task (tick)
await new Promise(setImmediate);

expect(promise1Resolved).toBe(true);
expect(promise2Resolved).toBe(true);

const [ value1, value2 ] = await Promise.all([ promise1, promise2 ]);
expect(value1).toBe(1);
expect(value2).toBe(2);

expect(loadCalls).toEqual([ [ 2 ] ]);
});

it('coalesces identical requests', async () => {
const [ identityLoader, loadCalls ] = idLoader<number>();

const promise1a = identityLoader.load(1);
const promise1b = identityLoader.load(1);

-expect(promise1a).toBe(promise1b);
-
const [ value1a, value1b ] = await Promise.all([ promise1a, promise1b ]);
expect(value1a).toBe(1);
expect(value1b).toBe(1);
33 changes: 30 additions & 3 deletions src/index.js
@@ -84,7 +84,10 @@ class DataLoader<K, V, C = K> {
if (cache) {
var cachedPromise = cache.get(cacheKey);
if (cachedPromise) {
-return cachedPromise;
+var cacheHits = batch.cacheHits || (batch.cacheHits = []);
+return new Promise(resolve => {
+  cacheHits.push(() => resolve(cachedPromise));
+});
}
}

@@ -229,7 +232,8 @@ type Batch<K, V> = {
callbacks: Array<{
resolve: (value: V) => void;
reject: (error: Error) => void;
-}>
+}>,
+cacheHits?: Array<() => void>
}

// Private: Either returns the current batch, or creates and schedules a
@@ -246,7 +250,10 @@ function getCurrentBatch<K, V>(loader: DataLoader<K, V, any>): Batch<K, V> {
if (
existingBatch !== null &&
!existingBatch.hasDispatched &&
-(maxBatchSize === 0 || existingBatch.keys.length < maxBatchSize)
+(maxBatchSize === 0 ||
+  (existingBatch.keys.length < maxBatchSize &&
+    (!existingBatch.cacheHits ||
+      existingBatch.cacheHits.length < maxBatchSize)))
) {
return existingBatch;
}
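
Note that the amended condition also counts pending cache hits against `maxBatchSize`, so a batch filled by cache hits closes just like a batch filled by keys. A small sketch of the effect, with a hypothetical identity loader and `maxBatchSize: 1`:

```js
const loader = new DataLoader(async keys => keys, { maxBatchSize: 1 })
loader.prime('a', 'A')

loader.load('a') // cache hit: occupies the single slot of the current batch
loader.load('b') // the batch is full, so this is scheduled into a fresh batch
```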
@@ -270,6 +277,12 @@ function dispatchBatch<K, V>(
// Mark this batch as having been dispatched.
batch.hasDispatched = true;

// If there's nothing to load, resolve any cache hits and return early.
if (batch.keys.length === 0) {
resolveCacheHits(batch);
return;
}

// Call the provided batchLoadFn for this loader with the batch's keys and
// with the loader as the `this` context.
var batchPromise = loader._batchLoadFn(batch.keys);
@@ -305,6 +318,9 @@
);
}

// First resolve all cache hits.
resolveCacheHits(batch);

// Step through values, resolving or rejecting each Promise in the batch.
for (var i = 0; i < batch.callbacks.length; i++) {
var value = values[i];
@@ -324,12 +340,23 @@ function failedDispatch<K, V>(
batch: Batch<K, V>,
error: Error
) {
// Cache hits are resolved, even though the batch failed.
resolveCacheHits(batch);
for (var i = 0; i < batch.keys.length; i++) {
loader.clear(batch.keys[i]);
batch.callbacks[i].reject(error);
}
}
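
As the error path above shows, a failed batch still resolves its cache hits with their cached values; only the uncached keys are rejected and cleared. A sketch of that behavior, with a hypothetical batch function that always rejects:

```js
const loader = new DataLoader(() => Promise.reject(new Error('backend down')))
loader.prime(1, 'one')

loader.load(1).then(value => console.log(value)) // logs 'one' (cache hit)
loader.load(2).catch(error => console.log(error.message)) // logs 'backend down'
```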

// Private: Resolves the Promises for any cache hits in this batch.
function resolveCacheHits(batch: Batch<any, any>) {
if (batch.cacheHits) {
for (var i = 0; i < batch.cacheHits.length; i++) {
batch.cacheHits[i]();
}
}
}

// Private: produce a cache key for a given key (and options)
function getCacheKey<K, V, C>(
options: ?Options<K, V, C>,