Commit

Also update examples/*
leebyron committed Nov 13, 2019
1 parent 38e7df2 commit cbc3ff7
Showing 6 changed files with 84 additions and 71 deletions.
20 changes: 9 additions & 11 deletions examples/CouchDB.md
This example uses the [nano][] CouchDB client which offers a `fetch` method
supporting the bulk document API.

```js
const DataLoader = require('dataloader');
const nano = require('nano');

const couch = nano('http://localhost:5984');

const userDB = couch.use('users');
const userLoader = new DataLoader(keys => new Promise((resolve, reject) => {
  userDB.fetch({ keys: keys }, (error, docs) => {
    if (error) {
      return reject(error);
    }
    // …rest of the callback collapsed in the diff…
  });
}));

// Usage

const promise1 = userLoader.load('8fce1902834ac6458e9886fa7f89c0ef');
const promise2 = userLoader.load('00a271787f89c0ef2e10e88a0c00048b');
const [ user1, user2 ] = await Promise.all([ promise1, promise2 ]);
console.log(user1, user2);
```
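The collapsed portion of the callback maps fetched rows back to documents. In nano's bulk API, each returned row carries an `error` field when its key was not found, so the missing step plausibly looks like the sketch below (`docsFromRows` is a hypothetical helper, not necessarily the file's exact code); the callback would end with `resolve(docsFromRows(docs))`.

```js
// Map each bulk-fetch row to its doc, or to an Error for keys not found.
function docsFromRows(docs) {
  return docs.rows.map(row => (row.error ? new Error(row.error) : row.doc));
}
```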

[nano]: https://github.com/dscape/nano
6 changes: 3 additions & 3 deletions examples/GoogleDatastore.md
```js
const Datastore = require('@google-cloud/datastore');

const datastore = new Datastore();

const datastoreLoader = new DataLoader(async keys => {
  const results = await datastore.get(keys);
  // Sort resulting entities by the keys they were requested with.
  const entities = results[0];
  const entitiesByKey = {};
  entities.forEach(entity => {
    entitiesByKey[JSON.stringify(entity[datastore.KEY])] = entity;
  });
  return keys.map(key => entitiesByKey[JSON.stringify(key)] || null);
}, {
  // Datastore complex keys need to be converted to a string for use as cache keys.
  cacheKeyFn: key => JSON.stringify(key),
});
```
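Datastore keys are objects rather than scalars, which is why `cacheKeyFn` serializes them before they are used as cache keys. A hypothetical usage sketch (the `User` kind and numeric id are invented for illustration):

```js
// Complex key object; cacheKeyFn turns it into a stable cache key internally.
const userKey = datastore.key(['User', 5730082031140864]);
const user = await datastoreLoader.load(userKey);
```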
10 changes: 5 additions & 5 deletions examples/Knex.md
```js
const db = require('./db'); // an instance of Knex client

// The list of data loaders

const loaders = {
  user: new DataLoader(ids => db.table('users')
    .whereIn('id', ids).select()
    .then(rows => ids.map(id => rows.find(x => x.id === id)))),
  // …additional loaders collapsed in the diff, including `storiesByUserId` (see the sketch after this example)…
};

// Usage

const [user, stories] = await Promise.all([
  loaders.user.load('1234'),
  loaders.storiesByUserId.load('1234'),
]);
```
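The `storiesByUserId` loader used in the example above is collapsed in the diff. Following the same pattern as the `user` loader, it plausibly batches a single `whereIn` query and groups rows per key; this standalone sketch assumes a `stories` table with an `author_id` column:

```js
const storiesByUserId = new DataLoader(ids => db.table('stories')
  .whereIn('author_id', ids).select()
  // Unlike `user`, each key maps to an array: all stories by that author.
  .then(rows => ids.map(id => rows.filter(row => row.author_id === id))));
```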

For a complete example visit [kriasoft/nodejs-api-starter][nsk].
[nsk]: https://github.com/kriasoft/nodejs-api-starter
8 changes: 4 additions & 4 deletions examples/Redis.md
Here we build an example Redis DataLoader using [node_redis][].

```js
const DataLoader = require('dataloader');
const redis = require('redis');

const client = redis.createClient();

const redisLoader = new DataLoader(keys => new Promise((resolve, reject) => {
  client.mget(keys, (error, results) => {
    if (error) {
      return reject(error);
    }
    // …rest of the callback collapsed in the diff…
  });
}));
```
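Redis's `mget` returns `null` for keys that do not exist, and a DataLoader batch function must return a value or an Error for every key. The collapsed remainder of the example presumably performs a mapping like this sketch (`normalizeMgetResults` is a hypothetical helper; the callback would call `resolve(normalizeMgetResults(keys, results))`):

```js
// Map mget results back to per-key values, or Errors for missing keys.
function normalizeMgetResults(keys, results) {
  return results.map((result, index) =>
    result !== null ? result : new Error(`No key: ${keys[index]}`)
  );
}
```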
60 changes: 39 additions & 21 deletions examples/RethinkDB.md
RethinkDb offers a batching method called `getAll`, but there are a few caveats:
* Order of results is not guaranteed to match the order of the supplied keys
* Non-existent keys will not return an empty record

For example, against a table `example_table` with these records:

```js
[
{"id": 1, "name": "Document 1"},
{"id": 2, "name": "Document 2"}
]
```

A query `r.getAll(1, 2, 3)` could return:

```js
[
{"id": 2, "name": "Document 2"},
{"id": 1, "name": "Document 1"}
]
```

Because query keys and values are associated by position in the dataloader
cache, this naive implementation won't work (with the same table as above):

```js
const r = require('rethinkdb');
const db = await r.connect();

const exampleLoader = new DataLoader(async keys => {
  const result = await db.table('example_table').getAll(...keys);
  return result.toArray();
});

await exampleLoader.loadMany([1, 2, 3]); // Throws (values length !== keys length)

await exampleLoader.loadMany([1, 2]);
await exampleLoader.load(1); // {"id": 2, "name": "Document 2"}
```

A solution is to normalize results returned by `getAll` to match the structure
of supplied `keys`.

To achieve this efficiently, we first write an indexing function. This function
will return a `Map` indexing results.
Parameters:
* `results`: Array of RethinkDb results
* `indexField`: String indicating which field was used as index for this batch query
* `cacheKeyFn`: Optional function used to serialize non-scalar index field values

```js
function indexResults(results, indexField, cacheKeyFn = key => key) {
  const indexedResults = new Map();
  results.forEach(res => {
    indexedResults.set(cacheKeyFn(res[indexField]), res);
  });
  return indexedResults;
}
```
Then, we can leverage our `Map` to normalize RethinkDb results with another
utility function which will produce a normalizing function.

```js
function normalizeRethinkDbResults(keys, indexField, cacheKeyFn = key => key) {
  return results => {
    const indexedResults = indexResults(results, indexField, cacheKeyFn);
    return keys.map(
      val => indexedResults.get(cacheKeyFn(val))
        || new Error(`Key not found: ${val}`)
    );
  };
}
```
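A quick check of the normalizer's behavior with hypothetical data; key `3` has no matching result, so it maps to an `Error`:

```js
const normalize = normalizeRethinkDbResults([1, 3], 'id');
// → [{ id: 1, name: 'Document 1' }, Error('Key not found: 3')]
normalize([{ id: 1, name: 'Document 1' }]);
```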

Full dataloader implementation:
```js
const r = require('rethinkdb');
const db = await r.connect();

const exampleLoader = new DataLoader(async keys => {
  const results = await db.table('example_table').getAll(...keys);
  // normalizeRethinkDbResults(keys, 'id') returns the normalizing function,
  // which lines the fetched rows up with the requested keys.
  return normalizeRethinkDbResults(keys, 'id')(await results.toArray());
});

// [{"id": 1, "name": "Document 1"}, {"id": 2, "name": "Document 2"}, Error]
await exampleLoader.loadMany([1, 2, 3]);

// {"id": 1, "name": "Document 1"}
await exampleLoader.load(1);
```
51 changes: 24 additions & 27 deletions examples/SQL.md
…further batch queries together. Another non-caching `DataLoader` utilizes this
method to provide a similar API. `DataLoaders` can access other `DataLoaders`.

```js
const DataLoader = require('dataloader')
const sqlite3 = require('sqlite3')

const db = new sqlite3.Database('./to/your/db.sql')

// Dispatch a WHERE-IN query, ensuring response has rows in correct order.
const userLoader = new DataLoader(async ids => {
  const params = ids.map(id => '?').join()
  const query = `SELECT * FROM users WHERE id IN (${params})`
  const rows = await queryLoader.load([query, ids])
  return ids.map(
    id => rows.find(row => row.id === id) || new Error(`Row not found: ${id}`)
  )
})

// Parallelize all queries, but do not cache.
const queryLoader = new DataLoader(queries => new Promise(resolve => {
  // Must be `let`, not `const`: the counter is decremented as queries finish.
  let waitingOn = queries.length
  const results = []
  db.parallelize(() => {
    queries.forEach((query, index) => {
      db.all.apply(db, query.concat((error, result) => {
        results[index] = error || result
        if (--waitingOn === 0) {
          resolve(results)
        }
      }))
    })
  })
}), { cache: false })

// Usage

const promise1 = userLoader.load('1234')
const promise2 = userLoader.load('5678')
const [ user1, user2 ] = await Promise.all([ promise1, promise2 ])
console.log(user1, user2)
```

[sqlite3]: https://github.com/mapbox/node-sqlite3
