
Commit

fix: maxBatchSize parameter was not being sent to loader (#321)
* Fixes #294.

* Added changeset for fix.
thekevinbrown committed Dec 6, 2022
1 parent 26136b0 commit 3cd3a43
Showing 3 changed files with 24 additions and 5 deletions.
5 changes: 5 additions & 0 deletions .changeset/swift-buckets-drop.md
@@ -0,0 +1,5 @@
---
"dataloader": patch
---

Resolves an issue where the maxBatchSize parameter wouldn't be fully used on each batch sent to the backend loader.
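
For context, here is a minimal usage sketch (illustrative only, not part of this commit) of the behaviour the changeset describes, mirroring the test added below: with caching enabled, repeated keys become cache hits, and before this fix those cache hits counted toward maxBatchSize, so the batch function could receive under-filled batches.

```js
// Illustrative sketch, not part of this commit; mirrors the new test below.
const DataLoader = require('dataloader');

const loadCalls = [];
const identityLoader = new DataLoader(
  keys => {
    loadCalls.push(keys);
    return Promise.resolve(keys); // identity batch function
  },
  { maxBatchSize: 3 },
);

// Repeated keys are served from the cache as "cache hits".
Promise.all(['a', 'b', 'a', 'a', 'a', 'b', 'c'].map(k => identityLoader.load(k)))
  .then(() => {
    // Before this fix the cache hits consumed batch capacity and the batch
    // was cut short, e.g. [['a', 'b'], ['c']]; after it, only distinct keys
    // count, so one full batch is sent: [['a', 'b', 'c']].
    console.log(loadCalls);
  });
```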
20 changes: 18 additions & 2 deletions src/__tests__/dataloader.test.js
@@ -120,6 +120,21 @@ describe('Primary API', () => {
expect(loadCalls).toEqual([ [ 1, 2 ], [ 3 ] ]);
});

it('applies maxBatchSize correctly with duplicate keys', async () => {
const [ identityLoader, loadCalls ] = idLoader<string>({
maxBatchSize: 3,
batchScheduleFn: callback => { setTimeout(callback, 100); },
});

const values = [ 'a', 'b', 'a', 'a', 'a', 'b', 'c' ];
const results = await Promise.all(values.map(
value => identityLoader.load(value)
));

expect(results).toEqual(values);
expect(loadCalls).toEqual([ [ 'a', 'b', 'c' ] ]);
});

it('batches cached requests', async () => {
const loadCalls = [];
let resolveBatch = () => {};
@@ -185,8 +200,9 @@ describe('Primary API', () => {
// Move to next macro-task (tick)
await new Promise(setImmediate);

-// Promise 1 resolves first since max batch size is 1
-expect(promise1Resolved).toBe(true);
+// Promise 1 resolves first since max batch size is 1,
+// but it still hasn't resolved yet.
+expect(promise1Resolved).toBe(false);
expect(promise2Resolved).toBe(false);

resolveBatch();
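
The corrected comment and assertion above separate two moments: the size-1 batch is dispatched to the batch function on the next tick, but the promise returned by load only settles once the batch function's own promise resolves. A minimal sketch of that distinction (illustrative only, not part of this commit; resolveBackend is a hypothetical name):

```js
// Illustrative sketch, not part of this commit: dispatch happens on a later
// tick, but resolution still waits for the batch function's promise.
const DataLoader = require('dataloader');

let resolveBackend; // hypothetical hook to resolve the batch manually
const loader = new DataLoader(
  keys => new Promise(resolve => { resolveBackend = () => resolve(keys); }),
  { maxBatchSize: 1 },
);

(async () => {
  let settled = false;
  const promise = loader.load(1).then(value => { settled = true; return value; });

  await new Promise(setImmediate); // the batch has been dispatched by now...
  console.log(settled);            // ...but this is still false

  resolveBackend();                // now resolve the batch function's promise
  console.log(await promise);      // 1
})();
```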
4 changes: 1 addition & 3 deletions src/index.js
@@ -268,9 +268,7 @@ function getCurrentBatch<K, V>(loader: DataLoader<K, V, any>): Batch<K, V> {
if (
existingBatch !== null &&
!existingBatch.hasDispatched &&
-existingBatch.keys.length < loader._maxBatchSize &&
-(!existingBatch.cacheHits ||
-  existingBatch.cacheHits.length < loader._maxBatchSize)
+existingBatch.keys.length < loader._maxBatchSize
) {
return existingBatch;
}
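
The removed clause above meant every cache hit also counted against _maxBatchSize, so an existing batch could stop being reused even though it held fewer than maxBatchSize distinct keys; with only keys.length checked, batches fill with distinct keys regardless of how many cache hits they absorb. A rough illustration of the observable difference (illustrative only, not part of this commit):

```js
// Illustrative sketch, not part of this commit: cache hits no longer consume
// batch capacity, so distinct keys pack into fuller batches.
const DataLoader = require('dataloader');

const loadCalls = [];
const loader = new DataLoader(
  keys => { loadCalls.push(keys); return Promise.resolve(keys); },
  { maxBatchSize: 2 },
);

Promise.all(['x', 'x', 'x', 'y'].map(k => loader.load(k))).then(() => {
  // Before: the repeated 'x' loads were cache hits that filled the batch,
  // pushing 'y' into a second call, e.g. [['x'], ['y']].
  // After: only the two distinct keys count, so one call: [['x', 'y']].
  console.log(loadCalls);
});
```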
