Skip to content

Commit

Permalink
Resolve all options during construction (#226)
Browse files Browse the repository at this point in the history
  • Loading branch information
leebyron committed Nov 17, 2019
1 parent cde0cd8 commit 01c829d
Show file tree
Hide file tree
Showing 5 changed files with 126 additions and 68 deletions.
6 changes: 3 additions & 3 deletions README.md
Expand Up @@ -344,10 +344,10 @@ Create a new `DataLoader` given a batch loading function and options.
| Option Key | Type | Default | Description |
| ---------- | ---- | ------- | ----------- |
| *batch* | Boolean | `true` | Set to `false` to disable batching, invoking `batchLoadFn` with a single load key. This is equivalent to setting `maxBatchSize` to `1`.
| *maxBatchSize* | Number | `Infinity` | Limits the number of items that get passed in to the `batchLoadFn`.
| *cache* | Boolean | `true` | Set to `false` to disable memoization caching, creating a new Promise and new key in the `batchLoadFn` for every load of the same key.
| *maxBatchSize* | Number | `Infinity` | Limits the number of items that get passed in to the `batchLoadFn`. May be set to `1` to disable batching.
| *cache* | Boolean | `true` | Set to `false` to disable memoization caching, creating a new Promise and new key in the `batchLoadFn` for every load of the same key. This is equivalent to setting `cacheMap` to `null`.
| *cacheKeyFn* | Function | `key => key` | Produces cache key for a given load key. Useful when objects are keys and two objects should be considered equivalent.
| *cacheMap* | Object | `new Map()` | Instance of [Map][] (or an object with a similar API) to be used as cache.
| *cacheMap* | Object | `new Map()` | Instance of [Map][] (or an object with a similar API) to be used as cache. May be set to `null` to disable caching.

##### `load(key)`

Expand Down
20 changes: 20 additions & 0 deletions src/__tests__/abuse.test.js
Expand Up @@ -170,4 +170,24 @@ describe('Provides descriptive error messages for API abuse', () => {
'Custom cacheMap missing methods: set, delete, clear'
);
});

it('Requires a number for maxBatchSize', () => {
expect(() =>
// $FlowExpectError
new DataLoader(async keys => keys, { maxBatchSize: null })
).toThrow('maxBatchSize must be a positive number: null');
});

it('Requires a positive number for maxBatchSize', () => {
expect(() =>
new DataLoader(async keys => keys, { maxBatchSize: 0 })
).toThrow('maxBatchSize must be a positive number: 0');
});

it('Requires a function for cacheKeyFn', () => {
expect(() =>
// $FlowExpectError
new DataLoader(async keys => keys, { cacheKeyFn: null })
).toThrow('cacheKeyFn must be a function: null');
});
});
24 changes: 24 additions & 0 deletions src/__tests__/dataloader.test.js
Expand Up @@ -46,6 +46,21 @@ describe('Primary API', () => {
expect(that).toBe(loader);
});

it('references the loader as "this" in the cache key function', async () => {
let that;
const loader = new DataLoader<number, number>(async keys => keys, {
cacheKeyFn(key) {
that = this;
return key;
}
});

// Trigger the cache key function
await loader.load(1);

expect(that).toBe(loader);
});

it('supports loading multiple keys in one call', async () => {
const identityLoader = new DataLoader<number, number>(async keys => keys);

Expand Down Expand Up @@ -658,6 +673,15 @@ describe('Accepts options', () => {
]);
});

it('cacheMap may be set to null to disable cache', async () => {
const [ identityLoader, loadCalls ] = idLoader<string>({ cacheMap: null });

await identityLoader.load('A');
await identityLoader.load('A');

expect(loadCalls).toEqual([ [ 'A' ], [ 'A' ] ]);
});

it('Does not interact with a cache when cache is disabled', () => {
const promiseX = Promise.resolve('X');
const cacheMap = new Map([ [ 'X', promiseX ] ]);
Expand Down
29 changes: 13 additions & 16 deletions src/index.d.ts
Expand Up @@ -78,39 +78,36 @@ declare namespace DataLoader {
export type Options<K, V, C = K> = {

/**
* Default `true`. Set to `false` to disable batching,
* instead immediately invoking `batchLoadFn` with a
* single load key.
* Default `true`. Set to `false` to disable batching, invoking
* `batchLoadFn` with a single load key. This is equivalent to setting
* `maxBatchSize` to `1`.
*/
batch?: boolean,

/**
* Default `Infinity`. Limits the number of items that get
* passed in to the `batchLoadFn`.
* Default `Infinity`. Limits the number of items that get passed in to the
* `batchLoadFn`. May be set to `1` to disable batching.
*/
maxBatchSize?: number;

/**
* Default `true`. Set to `false` to disable memoization caching,
* instead creating a new Promise and new key in the `batchLoadFn` for every
* load of the same key.
* Default `true`. Set to `false` to disable memoization caching, creating a
* new Promise and new key in the `batchLoadFn` for every load of the same
* key. This is equivalent to setting `cacheMap` to `null`.
*/
cache?: boolean,

/**
* A function to produce a cache key for a given load key.
* Defaults to `key => key`. Useful to provide when JavaScript
* objects are keys and two similarly shaped objects should
* be considered equivalent.
* Default `key => key`. Produces cache key for a given load key. Useful
* when objects are keys and two objects should be considered equivalent.
*/
cacheKeyFn?: (key: K) => C,

/**
* An instance of Map (or an object with a similar API) to
* be used as the underlying cache for this loader.
* Default `new Map()`.
* Default `new Map()`. Instance of `Map` (or an object with a similar API)
* to be used as cache. May be set to `null` to disable caching.
*/
cacheMap?: CacheMap<C, Promise<V>>;
cacheMap?: CacheMap<C, Promise<V>> | null;
}
}

Expand Down
115 changes: 66 additions & 49 deletions src/index.js
Expand Up @@ -19,7 +19,7 @@ export type Options<K, V, C = K> = {
maxBatchSize?: number;
cache?: boolean;
cacheKeyFn?: (key: K) => C;
cacheMap?: CacheMap<C, Promise<V>>;
cacheMap?: CacheMap<C, Promise<V>> | null;
};

// If a custom cache is provided, it must be of this type (a subset of ES6 Map).
Expand Down Expand Up @@ -52,15 +52,17 @@ class DataLoader<K, V, C = K> {
);
}
this._batchLoadFn = batchLoadFn;
this._options = options;
this._promiseCache = getValidCacheMap(options);
this._maxBatchSize = getValidMaxBatchSize(options);
this._cacheKeyFn = getValidCacheKeyFn(options);
this._cacheMap = getValidCacheMap(options);
this._batch = null;
}

// Private
_batchLoadFn: BatchLoadFn<K, V>;
_options: ?Options<K, V, C>;
_promiseCache: ?CacheMap<C, Promise<V>>;
_maxBatchSize: number;
_cacheKeyFn: K => C;
_cacheMap: CacheMap<C, Promise<V>> | null;
_batch: Batch<K, V> | null;

/**
Expand All @@ -74,15 +76,13 @@ class DataLoader<K, V, C = K> {
);
}

// Determine options
var options = this._options;
var batch = getCurrentBatch(this);
var cache = this._promiseCache;
var cacheKey = getCacheKey(options, key);
var cacheMap = this._cacheMap;
var cacheKey = this._cacheKeyFn(key);

// If caching and there is a cache-hit, return cached Promise.
if (cache) {
var cachedPromise = cache.get(cacheKey);
if (cacheMap) {
var cachedPromise = cacheMap.get(cacheKey);
if (cachedPromise) {
var cacheHits = batch.cacheHits || (batch.cacheHits = []);
return new Promise(resolve => {
Expand All @@ -99,8 +99,8 @@ class DataLoader<K, V, C = K> {
});

// If caching, cache this promise.
if (cache) {
cache.set(cacheKey, promise);
if (cacheMap) {
cacheMap.set(cacheKey, promise);
}

return promise;
Expand Down Expand Up @@ -146,10 +146,10 @@ class DataLoader<K, V, C = K> {
* method chaining.
*/
clear(key: K): this {
var cache = this._promiseCache;
if (cache) {
var cacheKey = getCacheKey(this._options, key);
cache.delete(cacheKey);
var cacheMap = this._cacheMap;
if (cacheMap) {
var cacheKey = this._cacheKeyFn(key);
cacheMap.delete(cacheKey);
}
return this;
}
Expand All @@ -160,9 +160,9 @@ class DataLoader<K, V, C = K> {
* method chaining.
*/
clearAll(): this {
var cache = this._promiseCache;
if (cache) {
cache.clear();
var cacheMap = this._cacheMap;
if (cacheMap) {
cacheMap.clear();
}
return this;
}
Expand All @@ -174,12 +174,12 @@ class DataLoader<K, V, C = K> {
* To prime the cache with an error at a key, provide an Error instance.
*/
prime(key: K, value: V | Error): this {
var cache = this._promiseCache;
if (cache) {
var cacheKey = getCacheKey(this._options, key);
var cacheMap = this._cacheMap;
if (cacheMap) {
var cacheKey = this._cacheKeyFn(key);

// Only add the key if it does not already exist.
if (cache.get(cacheKey) === undefined) {
if (cacheMap.get(cacheKey) === undefined) {
// Cache a rejected promise if the value is an Error, in order to match
// the behavior of load(key).
var promise;
Expand All @@ -191,7 +191,7 @@ class DataLoader<K, V, C = K> {
} else {
promise = Promise.resolve(value);
}
cache.set(cacheKey, promise);
cacheMap.set(cacheKey, promise);
}
}
return this;
Expand Down Expand Up @@ -251,21 +251,15 @@ type Batch<K, V> = {
// Private: Either returns the current batch, or creates and schedules a
// dispatch of a new batch for the given loader.
function getCurrentBatch<K, V>(loader: DataLoader<K, V, any>): Batch<K, V> {
var options = loader._options;
var maxBatchSize =
(options && options.maxBatchSize) ||
(options && options.batch === false ? 1 : 0);

// If there is an existing batch which has not yet dispatched and is within
// the limit of the batch size, then return it.
var existingBatch = loader._batch;
if (
existingBatch !== null &&
!existingBatch.hasDispatched &&
(maxBatchSize === 0 ||
(existingBatch.keys.length < maxBatchSize &&
(!existingBatch.cacheHits ||
existingBatch.cacheHits.length < maxBatchSize)))
existingBatch.keys.length < loader._maxBatchSize &&
(!existingBatch.cacheHits ||
existingBatch.cacheHits.length < loader._maxBatchSize)
) {
return existingBatch;
}
Expand Down Expand Up @@ -369,34 +363,57 @@ function resolveCacheHits(batch: Batch<any, any>) {
}
}

// Private: produce a cache key for a given key (and options)
function getCacheKey<K, V, C>(
options: ?Options<K, V, C>,
key: K
): C {
// Private: given the DataLoader's options, produce a valid max batch size.
// Resolution order (all decided once, at construction):
//   - `batch: false`          -> 1 (batching disabled; equivalent per README)
//   - `maxBatchSize` omitted  -> Infinity (no limit)
//   - otherwise the option must be a positive number, else a TypeError is
//     thrown naming the offending value (e.g. "…positive number: null").
// NOTE: `(maxBatchSize: any)` is a Flow cast used only to allow string
// interpolation of a value Flow considers mistyped at this point.
function getValidMaxBatchSize(options: ?Options<any, any, any>): number {
var shouldBatch = !options || options.batch !== false;
if (!shouldBatch) {
return 1;
}
var maxBatchSize = options && options.maxBatchSize;
if (maxBatchSize === undefined) {
return Infinity;
}
// Rejects non-numbers and any number < 1 (0, negatives); NaN also fails
// the `< 1` comparison path via the typeof check ordering — NaN is a
// number, so NaN < 1 is false and NaN would pass through. TODO confirm
// whether NaN should be rejected explicitly.
if (typeof maxBatchSize !== 'number' || maxBatchSize < 1) {
throw new TypeError(
`maxBatchSize must be a positive number: ${(maxBatchSize: any)}`
);
}
return maxBatchSize;
}

// Private: given the DataLoader's options, produce a cache key function.
function getValidCacheKeyFn<K, C>(options: ?Options<K, any, C>): (K => C) {
var cacheKeyFn = options && options.cacheKeyFn;
return cacheKeyFn ? cacheKeyFn(key) : (key: any);
if (cacheKeyFn === undefined) {
return (key => key: any);
}
if (typeof cacheKeyFn !== 'function') {
throw new TypeError(`cacheKeyFn must be a function: ${(cacheKeyFn: any)}`);
}
return cacheKeyFn;
}

// Private: given the DataLoader's options, produce a CacheMap to be used.
function getValidCacheMap<K, V, C>(
options: ?Options<K, V, C>
): ?CacheMap<C, Promise<V>> {
): CacheMap<C, Promise<V>> | null {
var shouldCache = !options || options.cache !== false;
if (!shouldCache) {
return null;
}
var cacheMap = options && options.cacheMap;
if (!cacheMap) {
if (cacheMap === undefined) {
return new Map();
}
var cacheFunctions = [ 'get', 'set', 'delete', 'clear' ];
var missingFunctions = cacheFunctions
.filter(fnName => cacheMap && typeof cacheMap[fnName] !== 'function');
if (missingFunctions.length !== 0) {
throw new TypeError(
'Custom cacheMap missing methods: ' + missingFunctions.join(', ')
);
if (cacheMap !== null) {
var cacheFunctions = [ 'get', 'set', 'delete', 'clear' ];
var missingFunctions = cacheFunctions
.filter(fnName => cacheMap && typeof cacheMap[fnName] !== 'function');
if (missingFunctions.length !== 0) {
throw new TypeError(
'Custom cacheMap missing methods: ' + missingFunctions.join(', ')
);
}
}
return cacheMap;
}
Expand Down

0 comments on commit 01c829d

Please sign in to comment.