Socket
Socket
Sign inDemoInstall

dataloader

Package Overview
Dependencies
Maintainers
2
Versions
10
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

dataloader - npm Package Compare versions

Comparing version 1.4.0 to 2.0.0

63

index.d.ts
/**
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
* Copyright (c) 2019-present, GraphQL Foundation
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/

@@ -20,5 +18,5 @@

*/
declare class DataLoader<K, V> {
declare class DataLoader<K, V, C = K> {
constructor(batchLoadFn: DataLoader.BatchLoadFn<K, V>, options?: DataLoader.Options<K, V>);
constructor(batchLoadFn: DataLoader.BatchLoadFn<K, V>, options?: DataLoader.Options<K, V, C>);

@@ -43,3 +41,3 @@ /**

*/
loadMany(keys: K[]): Promise<V[]>;
loadMany(keys: ArrayLike<K>): Promise<Array<V | Error>>;

@@ -50,3 +48,3 @@ /**

*/
clear(key: K): DataLoader<K, V>;
clear(key: K): this;

@@ -58,3 +56,3 @@ /**

*/
clearAll(): DataLoader<K, V>;
clearAll(): this;

@@ -65,3 +63,3 @@ /**

*/
prime(key: K, value: V): DataLoader<K, V>;
prime(key: K, value: V | Error): this;
}

@@ -80,12 +78,13 @@

// of values or Errors.
export type BatchLoadFn<K, V> = (keys: K[]) => Promise<Array<V | Error>>;
export type BatchLoadFn<K, V> =
(keys: ReadonlyArray<K>) => PromiseLike<ArrayLike<V | Error>>;
// Optionally turn off batching or caching or provide a cache key function or a
// custom cache instance.
export type Options<K, V> = {
export type Options<K, V, C = K> = {
/**
* Default `true`. Set to `false` to disable batching,
* instead immediately invoking `batchLoadFn` with a
* single load key.
* Default `true`. Set to `false` to disable batching, invoking
* `batchLoadFn` with a single load key. This is equivalent to setting
* `maxBatchSize` to `1`.
*/

@@ -95,4 +94,4 @@ batch?: boolean,

/**
* Default `Infinity`. Limits the number of items that get
* passed in to the `batchLoadFn`.
* Default `Infinity`. Limits the number of items that get passed in to the
* `batchLoadFn`. May be set to `1` to disable batching.
*/

@@ -102,22 +101,26 @@ maxBatchSize?: number;

/**
* Default `true`. Set to `false` to disable memoization caching,
* instead creating a new Promise and new key in the `batchLoadFn` for every
* load of the same key.
* Default see https://github.com/graphql/dataloader#batch-scheduling.
* A function to schedule the later execution of a batch. The function is
* expected to call the provided callback in the immediate future.
*/
batchScheduleFn?: (callback: () => void) => void;
/**
* Default `true`. Set to `false` to disable memoization caching, creating a
* new Promise and new key in the `batchLoadFn` for every load of the same
* key. This is equivalent to setting `cacheMap` to `null`.
*/
cache?: boolean,
/**
* A function to produce a cache key for a given load key.
* Defaults to `key => key`. Useful to provide when JavaScript
* objects are keys and two similarly shaped objects should
* be considered equivalent.
* Default `key => key`. Produces cache key for a given load key. Useful
* when objects are keys and two objects should be considered equivalent.
*/
cacheKeyFn?: (key: any) => any,
cacheKeyFn?: (key: K) => C,
/**
* An instance of Map (or an object with a similar API) to
* be used as the underlying cache for this loader.
* Default `new Map()`.
* Default `new Map()`. Instance of `Map` (or an object with a similar API)
* to be used as cache. May be set to `null` to disable caching.
*/
cacheMap?: CacheMap<K, Promise<V>>;
cacheMap?: CacheMap<C, Promise<V>> | null;
}

@@ -124,0 +127,0 @@ }

@@ -1,4 +0,18 @@

function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
"use strict";
/**
* Copyright (c) 2019-present, GraphQL Foundation
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
*
*/
// A Function, which when given an Array of keys, returns a Promise of an Array
// of values or Errors.
// Optionally turn off batching or caching or provide a cache key function or a
// custom cache instance.
// If a custom cache is provided, it must be of this type (a subset of ES6 Map).
/**
* A `DataLoader` creates a public API for loading data from a particular

@@ -13,21 +27,20 @@ * data back-end with unique keys such as the `id` column of a SQL table or

*/
// Optionally turn off batching or caching or provide a cache key function or a
// custom cache instance.
var DataLoader = function () {
var DataLoader =
/*#__PURE__*/
function () {
function DataLoader(batchLoadFn, options) {
_classCallCheck(this, DataLoader);
if (typeof batchLoadFn !== 'function') {
throw new TypeError('DataLoader must be constructed with a function which accepts ' + ('Array<key> and returns Promise<Array<value>>, but got: ' + batchLoadFn + '.'));
throw new TypeError('DataLoader must be constructed with a function which accepts ' + ("Array<key> and returns Promise<Array<value>>, but got: " + batchLoadFn + "."));
}
this._batchLoadFn = batchLoadFn;
this._options = options;
this._promiseCache = getValidCacheMap(options);
this._queue = [];
}
this._maxBatchSize = getValidMaxBatchSize(options);
this._batchScheduleFn = getValidBatchScheduleFn(options);
this._cacheKeyFn = getValidCacheKeyFn(options);
this._cacheMap = getValidCacheMap(options);
this._batch = null;
} // Private
// Private
var _proto = DataLoader.prototype;

@@ -37,53 +50,42 @@ /**

*/
DataLoader.prototype.load = function load(key) {
var _this = this;
_proto.load = function load(key) {
if (key === null || key === undefined) {
throw new TypeError('The loader.load() function must be called with a value,' + ('but got: ' + String(key) + '.'));
throw new TypeError('The loader.load() function must be called with a value,' + ("but got: " + String(key) + "."));
}
// Determine options
var options = this._options;
var shouldBatch = !options || options.batch !== false;
var shouldCache = !options || options.cache !== false;
var cacheKeyFn = options && options.cacheKeyFn;
var cacheKey = cacheKeyFn ? cacheKeyFn(key) : key;
var batch = getCurrentBatch(this);
var cacheMap = this._cacheMap;
// If caching and there is a cache-hit, return cached Promise.
if (shouldCache) {
var cachedPromise = this._promiseCache.get(cacheKey);
var cacheKey = this._cacheKeyFn(key); // If caching and there is a cache-hit, return cached Promise.
if (cacheMap) {
var cachedPromise = cacheMap.get(cacheKey);
if (cachedPromise) {
return cachedPromise;
var cacheHits = batch.cacheHits || (batch.cacheHits = []);
return new Promise(function (resolve) {
cacheHits.push(function () {
return resolve(cachedPromise);
});
});
}
}
} // Otherwise, produce a new Promise for this key, and enqueue it to be
// dispatched along with the current batch.
// Otherwise, produce a new Promise for this value.
batch.keys.push(key);
var promise = new Promise(function (resolve, reject) {
// Enqueue this Promise to be dispatched.
_this._queue.push({ key: key, resolve: resolve, reject: reject });
batch.callbacks.push({
resolve: resolve,
reject: reject
});
}); // If caching, cache this promise.
// Determine if a dispatch of this queue should be scheduled.
// A single dispatch should be scheduled per queue at the time when the
// queue changes from "empty" to "full".
if (_this._queue.length === 1) {
if (shouldBatch) {
// If batching, schedule a task to dispatch the queue.
enqueuePostPromiseJob(function () {
return dispatchQueue(_this);
});
} else {
// Otherwise dispatch the (queue of one) immediately.
dispatchQueue(_this);
}
}
});
// If caching, cache this promise.
if (shouldCache) {
this._promiseCache.set(cacheKey, promise);
if (cacheMap) {
cacheMap.set(cacheKey, promise);
}
return promise;
};
}
/**

@@ -94,3 +96,3 @@ * Loads multiple keys, promising an array of values:

*
* This is equivalent to the more verbose:
* This is similar to the more verbose:
*

@@ -102,16 +104,28 @@ * var [ a, b ] = await Promise.all([

*
* However it is different in the case where any load fails. Where
* Promise.all() would reject, loadMany() always resolves; however, each
* result is either a value or an Error instance.
*
* var [ a, b, c ] = await myLoader.loadMany([ 'a', 'b', 'badkey' ]);
* // c instanceof Error
*
*/
;
_proto.loadMany = function loadMany(keys) {
if (!isArrayLike(keys)) {
throw new TypeError('The loader.loadMany() function must be called with Array<key> ' + ("but got: " + keys + "."));
} // Support ArrayLike by using only minimal property access
DataLoader.prototype.loadMany = function loadMany(keys) {
var _this2 = this;
if (!Array.isArray(keys)) {
throw new TypeError('The loader.loadMany() function must be called with Array<key> ' + ('but got: ' + keys + '.'));
var loadPromises = [];
for (var i = 0; i < keys.length; i++) {
loadPromises.push(this.load(keys[i])["catch"](function (error) {
return error;
}));
}
return Promise.all(keys.map(function (key) {
return _this2.load(key);
}));
};
return Promise.all(loadPromises);
}
/**

@@ -121,11 +135,15 @@ * Clears the value at `key` from the cache, if it exists. Returns itself for

*/
;
_proto.clear = function clear(key) {
var cacheMap = this._cacheMap;
DataLoader.prototype.clear = function clear(key) {
var cacheKeyFn = this._options && this._options.cacheKeyFn;
var cacheKey = cacheKeyFn ? cacheKeyFn(key) : key;
this._promiseCache.delete(cacheKey);
if (cacheMap) {
var cacheKey = this._cacheKeyFn(key);
cacheMap["delete"](cacheKey);
}
return this;
};
}
/**

@@ -136,26 +154,44 @@ * Clears the entire cache. To be used when some event results in unknown

*/
;
_proto.clearAll = function clearAll() {
var cacheMap = this._cacheMap;
DataLoader.prototype.clearAll = function clearAll() {
this._promiseCache.clear();
if (cacheMap) {
cacheMap.clear();
}
return this;
};
}
/**
* Adds the provided key and value to the cache. If the key already
* exists, no change is made. Returns itself for method chaining.
*
* To prime the cache with an error at a key, provide an Error instance.
*/
;
_proto.prime = function prime(key, value) {
var cacheMap = this._cacheMap;
DataLoader.prototype.prime = function prime(key, value) {
var cacheKeyFn = this._options && this._options.cacheKeyFn;
var cacheKey = cacheKeyFn ? cacheKeyFn(key) : key;
if (cacheMap) {
var cacheKey = this._cacheKeyFn(key); // Only add the key if it does not already exist.
// Only add the key if it does not already exist.
if (this._promiseCache.get(cacheKey) === undefined) {
// Cache a rejected promise if the value is an Error, in order to match
// the behavior of load(key).
var promise = value instanceof Error ? Promise.reject(value) : Promise.resolve(value);
this._promiseCache.set(cacheKey, promise);
if (cacheMap.get(cacheKey) === undefined) {
// Cache a rejected promise if the value is an Error, in order to match
// the behavior of load(key).
var promise;
if (value instanceof Error) {
promise = Promise.reject(value); // Since this is a case where an Error is intentionally being primed
// for a given key, we want to disable unhandled promise rejection.
promise["catch"](function () {});
} else {
promise = Promise.resolve(value);
}
cacheMap.set(cacheKey, promise);
}
}

@@ -167,5 +203,3 @@

return DataLoader;
}();
// Private: Enqueue a Job to be executed after all "PromiseJobs" Jobs.
}(); // Private: Enqueue a Job to be executed after all "PromiseJobs" Jobs.
//

@@ -198,17 +232,2 @@ // ES6 JavaScript uses the concepts Job and JobQueue to schedule work to occur

// If a custom cache is provided, it must be of this type (a subset of ES6 Map).
/**
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
// A Function, which when given an Array of keys, returns a Promise of an Array
// of values or Errors.
var enqueuePostPromiseJob = typeof process === 'object' && typeof process.nextTick === 'function' ? function (fn) {

@@ -218,105 +237,190 @@ if (!resolvedPromise) {

}
resolvedPromise.then(function () {
return process.nextTick(fn);
});
} : setImmediate || setTimeout;
} : setImmediate || setTimeout; // Private: cached resolved Promise instance
// Private: cached resolved Promise instance
var resolvedPromise;
var resolvedPromise; // Private: Describes a batch of requests
// Private: given the current state of a Loader instance, perform a batch load
// from its current queue.
function dispatchQueue(loader) {
// Take the current loader queue, replacing it with an empty queue.
var queue = loader._queue;
loader._queue = [];
// Private: Either returns the current batch, or creates and schedules a
// dispatch of a new batch for the given loader.
function getCurrentBatch(loader) {
// If there is an existing batch which has not yet dispatched and is within
// the limit of the batch size, then return it.
var existingBatch = loader._batch;
// If a maxBatchSize was provided and the queue is longer, then segment the
// queue into multiple batches, otherwise treat the queue as a single batch.
var maxBatchSize = loader._options && loader._options.maxBatchSize;
if (maxBatchSize && maxBatchSize > 0 && maxBatchSize < queue.length) {
for (var i = 0; i < queue.length / maxBatchSize; i++) {
dispatchQueueBatch(loader, queue.slice(i * maxBatchSize, (i + 1) * maxBatchSize));
}
} else {
dispatchQueueBatch(loader, queue);
}
}
if (existingBatch !== null && !existingBatch.hasDispatched && existingBatch.keys.length < loader._maxBatchSize && (!existingBatch.cacheHits || existingBatch.cacheHits.length < loader._maxBatchSize)) {
return existingBatch;
} // Otherwise, create a new batch for this loader.
function dispatchQueueBatch(loader, queue) {
// Collect all keys to be loaded in this dispatch
var keys = queue.map(function (_ref) {
var key = _ref.key;
return key;
var newBatch = {
hasDispatched: false,
keys: [],
callbacks: []
}; // Store it on the loader so it may be reused.
loader._batch = newBatch; // Then schedule a task to dispatch this batch of requests.
loader._batchScheduleFn(function () {
return dispatchBatch(loader, newBatch);
});
// Call the provided batchLoadFn for this loader with the loader queue's keys.
var batchLoadFn = loader._batchLoadFn;
var batchPromise = batchLoadFn(keys);
return newBatch;
}
// Assert the expected response from batchLoadFn
function dispatchBatch(loader, batch) {
// Mark this batch as having been dispatched.
batch.hasDispatched = true; // If there's nothing to load, resolve any cache hits and return early.
if (batch.keys.length === 0) {
resolveCacheHits(batch);
return;
} // Call the provided batchLoadFn for this loader with the batch's keys and
// with the loader as the `this` context.
var batchPromise = loader._batchLoadFn(batch.keys); // Assert the expected response from batchLoadFn
if (!batchPromise || typeof batchPromise.then !== 'function') {
return failedDispatch(loader, queue, new TypeError('DataLoader must be constructed with a function which accepts ' + 'Array<key> and returns Promise<Array<value>>, but the function did ' + ('not return a Promise: ' + String(batchPromise) + '.')));
}
return failedDispatch(loader, batch, new TypeError('DataLoader must be constructed with a function which accepts ' + 'Array<key> and returns Promise<Array<value>>, but the function did ' + ("not return a Promise: " + String(batchPromise) + ".")));
} // Await the resolution of the call to batchLoadFn.
// Await the resolution of the call to batchLoadFn.
batchPromise.then(function (values) {
// Assert the expected resolution from batchLoadFn.
if (!Array.isArray(values)) {
throw new TypeError('DataLoader must be constructed with a function which accepts ' + 'Array<key> and returns Promise<Array<value>>, but the function did ' + ('not return a Promise of an Array: ' + String(values) + '.'));
if (!isArrayLike(values)) {
throw new TypeError('DataLoader must be constructed with a function which accepts ' + 'Array<key> and returns Promise<Array<value>>, but the function did ' + ("not return a Promise of an Array: " + String(values) + "."));
}
if (values.length !== keys.length) {
throw new TypeError('DataLoader must be constructed with a function which accepts ' + 'Array<key> and returns Promise<Array<value>>, but the function did ' + 'not return a Promise of an Array of the same length as the Array ' + 'of keys.' + ('\n\nKeys:\n' + String(keys)) + ('\n\nValues:\n' + String(values)));
}
// Step through the values, resolving or rejecting each Promise in the
// loaded queue.
queue.forEach(function (_ref2, index) {
var resolve = _ref2.resolve,
reject = _ref2.reject;
if (values.length !== batch.keys.length) {
throw new TypeError('DataLoader must be constructed with a function which accepts ' + 'Array<key> and returns Promise<Array<value>>, but the function did ' + 'not return a Promise of an Array of the same length as the Array ' + 'of keys.' + ("\n\nKeys:\n" + String(batch.keys)) + ("\n\nValues:\n" + String(values)));
} // Resolve all cache hits in the same micro-task as freshly loaded values.
var value = values[index];
resolveCacheHits(batch); // Step through values, resolving or rejecting each Promise in the batch.
for (var i = 0; i < batch.callbacks.length; i++) {
var value = values[i];
if (value instanceof Error) {
reject(value);
batch.callbacks[i].reject(value);
} else {
resolve(value);
batch.callbacks[i].resolve(value);
}
});
}).catch(function (error) {
return failedDispatch(loader, queue, error);
}
})["catch"](function (error) {
return failedDispatch(loader, batch, error);
});
}
// Private: do not cache individual loads if the entire batch dispatch fails,
} // Private: do not cache individual loads if the entire batch dispatch fails,
// but still reject each request so they do not hang.
function failedDispatch(loader, queue, error) {
queue.forEach(function (_ref3) {
var key = _ref3.key,
reject = _ref3.reject;
loader.clear(key);
reject(error);
});
}
// Private: given the DataLoader's options, produce a CacheMap to be used.
function failedDispatch(loader, batch, error) {
// Cache hits are resolved, even though the batch failed.
resolveCacheHits(batch);
for (var i = 0; i < batch.keys.length; i++) {
loader.clear(batch.keys[i]);
batch.callbacks[i].reject(error);
}
} // Private: Resolves the Promises for any cache hits in this batch.
function resolveCacheHits(batch) {
if (batch.cacheHits) {
for (var i = 0; i < batch.cacheHits.length; i++) {
batch.cacheHits[i]();
}
}
} // Private: given the DataLoader's options, produce a valid max batch size.
function getValidMaxBatchSize(options) {
var shouldBatch = !options || options.batch !== false;
if (!shouldBatch) {
return 1;
}
var maxBatchSize = options && options.maxBatchSize;
if (maxBatchSize === undefined) {
return Infinity;
}
if (typeof maxBatchSize !== 'number' || maxBatchSize < 1) {
throw new TypeError("maxBatchSize must be a positive number: " + maxBatchSize);
}
return maxBatchSize;
} // Private
function getValidBatchScheduleFn(options) {
var batchScheduleFn = options && options.batchScheduleFn;
if (batchScheduleFn === undefined) {
return enqueuePostPromiseJob;
}
if (typeof batchScheduleFn !== 'function') {
throw new TypeError("batchScheduleFn must be a function: " + batchScheduleFn);
}
return batchScheduleFn;
} // Private: given the DataLoader's options, produce a cache key function.
function getValidCacheKeyFn(options) {
var cacheKeyFn = options && options.cacheKeyFn;
if (cacheKeyFn === undefined) {
return function (key) {
return key;
};
}
if (typeof cacheKeyFn !== 'function') {
throw new TypeError("cacheKeyFn must be a function: " + cacheKeyFn);
}
return cacheKeyFn;
} // Private: given the DataLoader's options, produce a CacheMap to be used.
function getValidCacheMap(options) {
var shouldCache = !options || options.cache !== false;
if (!shouldCache) {
return null;
}
var cacheMap = options && options.cacheMap;
if (!cacheMap) {
if (cacheMap === undefined) {
return new Map();
}
var cacheFunctions = ['get', 'set', 'delete', 'clear'];
var missingFunctions = cacheFunctions.filter(function (fnName) {
return cacheMap && typeof cacheMap[fnName] !== 'function';
});
if (missingFunctions.length !== 0) {
throw new TypeError('Custom cacheMap missing methods: ' + missingFunctions.join(', '));
if (cacheMap !== null) {
var cacheFunctions = ['get', 'set', 'delete', 'clear'];
var missingFunctions = cacheFunctions.filter(function (fnName) {
return cacheMap && typeof cacheMap[fnName] !== 'function';
});
if (missingFunctions.length !== 0) {
throw new TypeError('Custom cacheMap missing methods: ' + missingFunctions.join(', '));
}
}
return cacheMap;
}
} // Private
// Private
function isArrayLike(x) {
return typeof x === 'object' && x !== null && typeof x.length === 'number' && (x.length === 0 || x.length > 0 && Object.prototype.hasOwnProperty.call(x, x.length - 1));
}
module.exports = DataLoader;

@@ -1,1 +0,29 @@

{"name":"dataloader","version":"1.4.0","description":"A data loading utility to reduce requests to a backend via batching and caching.","contributors":["Lee Byron <lee@leebyron.com> (http://leebyron.com/)","Daniel Schafer <dschafer@fb.com>","Nicholas Schrock <schrockn@fb.com>"],"license":"BSD-3-Clause","homepage":"https://github.com/facebook/dataloader","bugs":{"url":"https://github.com/facebook/dataloader/issues"},"repository":{"type":"git","url":"http://github.com/facebook/dataloader.git"},"main":"index.js","typings":"index.d.ts","files":["index.js","index.js.flow","index.d.ts","README.md","LICENSE","PATENTS"]}
{
"name": "dataloader",
"version": "2.0.0",
"description": "A data loading utility to reduce requests to a backend via batching and caching.",
"contributors": [
"Lee Byron <lee@leebyron.com> (http://leebyron.com/)",
"Daniel Schafer <dschafer@fb.com>",
"Nicholas Schrock <schrockn@fb.com>"
],
"license": "MIT",
"homepage": "https://github.com/graphql/dataloader",
"bugs": {
"url": "https://github.com/graphql/dataloader/issues"
},
"repository": {
"type": "git",
"url": "http://github.com/graphql/dataloader.git"
},
"main": "index.js",
"typings": "index.d.ts",
"files": [
"index.js",
"index.js.flow",
"index.d.ts",
"README.md",
"LICENSE",
"PATENTS"
]
}

@@ -7,4 +7,4 @@ # DataLoader

[![Build Status](https://travis-ci.org/facebook/dataloader.svg)](https://travis-ci.org/facebook/dataloader)
[![Coverage Status](https://coveralls.io/repos/facebook/dataloader/badge.svg?branch=master&service=github)](https://coveralls.io/github/facebook/dataloader?branch=master)
[![Build Status](https://travis-ci.org/graphql/dataloader.svg)](https://travis-ci.org/graphql/dataloader)
[![Coverage Status](https://coveralls.io/repos/graphql/dataloader/badge.svg?branch=master&service=github)](https://coveralls.io/github/graphql/dataloader?branch=master)

@@ -57,5 +57,5 @@ A port of the "Loader" API originally developed by [@schrockn][] at Facebook in

```js
var DataLoader = require('dataloader')
const DataLoader = require('dataloader')
var userLoader = new DataLoader(keys => myBatchGetUsers(keys));
const userLoader = new DataLoader(keys => myBatchGetUsers(keys))
```

@@ -71,10 +71,10 @@

```js
userLoader.load(1)
.then(user => userLoader.load(user.invitedByID))
.then(invitedBy => console.log(`User 1 was invited by ${invitedBy}`));
const user = await userLoader.load(1)
const invitedBy = await userLoader.load(user.invitedByID)
console.log(`User 1 was invited by ${invitedBy}`)
// Elsewhere in your application
userLoader.load(2)
.then(user => userLoader.load(user.lastInvitedID))
.then(lastInvited => console.log(`User 2 last invited ${lastInvited}`));
const user = await userLoader.load(2)
const lastInvited = await userLoader.load(user.lastInvitedID)
console.log(`User 2 last invited ${lastInvited}`)
```

@@ -96,4 +96,16 @@

A batch loading function accepts an Array of keys, and returns a Promise which
resolves to an Array of values. There are a few constraints that must be upheld:
resolves to an Array of values or Error instances. The loader itself is provided
as the `this` context.
```js
async function batchFunction(keys) {
const results = await db.fetchAllKeys(keys)
return keys.map(key => results[key] || new Error(`No result for ${key}`))
}
const loader = new DataLoader(batchFunction)
```
There are a few constraints this function must uphold:
* The Array of values must be the same length as the Array of keys.

@@ -123,3 +135,3 @@ * Each index in the Array of values must correspond to the same index in the Array of keys.

{ id: 9, name: 'Chicago' },
null,
null, // or perhaps `new Error()`
{ id: 1, name: 'New York' }

@@ -129,3 +141,55 @@ ]

#### Batch Scheduling
By default DataLoader will coalesce all individual loads which occur within a
single frame of execution before calling your batch function with all requested
keys. This ensures no additional latency while capturing many related requests
into a single batch. In fact, this is the same behavior used in Facebook's
original PHP implementation in 2010. See `enqueuePostPromiseJob` in the
[source code][] for more details about how this works.
However sometimes this behavior is not desirable or optimal. Perhaps you expect
requests to be spread out over a few subsequent ticks because of an existing use
of `setTimeout`, or you just want manual control over dispatching regardless of
the run loop. DataLoader allows providing a custom batch scheduler to provide
these or any other behaviors.
A custom scheduler is provided as `batchScheduleFn` in options. It must be a
function which is passed a callback and is expected to call that callback in the
immediate future to execute the batch request.
As an example, here is a batch scheduler which collects all requests over a
100ms window of time (and as a consequence, adds 100ms of latency):
```js
const myLoader = new DataLoader(myBatchFn, {
batchScheduleFn: callback => setTimeout(callback, 100)
})
```
As another example, here is a manually dispatched batch scheduler:
```js
function createScheduler() {
let callbacks = []
return {
schedule(callback) {
callbacks.push(callback)
},
dispatch() {
callbacks.forEach(callback => callback())
callbacks = []
}
}
}
const { schedule, dispatch } = createScheduler()
const myLoader = new DataLoader(myBatchFn, { batchScheduleFn: schedule })
myLoader.load(1)
myLoader.load(2)
dispatch()
```
## Caching

@@ -137,14 +201,4 @@

In addition to relieving pressure on your data storage, caching results per-request
also creates fewer objects which may relieve memory pressure on your application:
#### Caching Per-Request
```js
var userLoader = new DataLoader(...)
var promise1A = userLoader.load(1)
var promise1B = userLoader.load(1)
assert(promise1A === promise1B)
```
#### Caching per-Request
DataLoader caching *does not* replace Redis, Memcache, or any other shared

@@ -170,7 +224,7 @@ application-level cache. DataLoader is first and foremost a data loading mechanism,

var app = express()
const app = express()
app.get('/', function(req, res) {
var authToken = authenticateUser(req)
var loaders = createLoaders(authToken)
const authToken = authenticateUser(req)
const loaders = createLoaders(authToken)
res.send(renderPage(req, loaders))

@@ -182,2 +236,34 @@ })

#### Caching and Batching
Subsequent calls to `.load()` with the same key will result in that key not
appearing in the keys provided to your batch function. *However*, the resulting
Promise will still wait on the current batch to complete. This way both cached
and uncached requests will resolve at the same time, allowing DataLoader
optimizations for subsequent dependent loads.
In the example below, User `1` happens to be cached. However, because User `1`
and `2` are loaded in the same tick, they will resolve at the same time. This
means both `user.bestFriendID` loads will also happen in the same tick which
results in two total requests (the same as if User `1` had not been cached).
```js
userLoader.prime(1, { bestFriend: 3 })
async function getBestFriend(userID) {
const user = await userLoader.load(userID)
return await userLoader.load(user.bestFriendID)
}
// In one part of your application
getBestFriend(1)
// Elsewhere
getBestFriend(2)
```
Without this optimization, if the cached User `1` resolved immediately, this
could result in three total requests since each `user.bestFriendID` load would
happen at different times.
#### Clearing Cache

@@ -195,14 +281,13 @@

// Request begins...
var userLoader = new DataLoader(...)
const userLoader = new DataLoader(...)
// And a value happens to be loaded (and cached).
userLoader.load(4).then(...)
const user = await userLoader.load(4)
// A mutation occurs, invalidating what might be in cache.
sqlRun('UPDATE users WHERE id=4 SET username="zuck"').then(
() => userLoader.clear(4)
)
await sqlRun('UPDATE users WHERE id=4 SET username="zuck"')
userLoader.clear(4)
// Later the value load is loaded again so the mutated data appears.
userLoader.load(4).then(...)
const user = await userLoader.load(4)

@@ -222,8 +307,10 @@ // Request completes.

```js
userLoader.load(1).catch(error => {
if (/* determine if should clear error */) {
userLoader.clear(1);
try {
const user = await userLoader.load(1)
} catch (error) {
if (/* determine if the error should not be cached */) {
userLoader.clear(1)
}
throw error;
});
throw error
}
```

@@ -246,3 +333,3 @@

```js
var myLoader = new DataLoader(keys => {
const myLoader = new DataLoader(keys => {
console.log(keys)

@@ -266,3 +353,3 @@ return someBatchLoadFn(keys)

```js
var myLoader = new DataLoader(keys => {
const myLoader = new DataLoader(keys => {
identityLoader.clearAll()

@@ -273,3 +360,30 @@ return someBatchLoadFn(keys)

#### Custom Cache
As mentioned above, DataLoader is intended to be used as a per-request cache.
Since requests are short-lived, DataLoader uses an infinitely growing [Map][] as
a memoization cache. This should not pose a problem as most requests are
short-lived and the entire cache can be discarded after the request completes.
However this memoization caching strategy isn't safe when using a long-lived
DataLoader, since it could consume too much memory. If using DataLoader in this
way, you can provide a custom Cache instance with whatever behavior you prefer,
as long as it follows the same API as [Map][].
The example below uses an LRU (least recently used) cache to limit total memory
to hold at most 100 cached values via the [lru_map][] npm package.
```js
import { LRUMap } from 'lru_map'
const myLoader = new DataLoader(someBatchLoadFn, {
cacheMap: new LRUMap(100)
})
```
More specifically, any object that implements the `get()`, `set()`,
`delete()` and `clear()` methods can be provided. This allows for custom Maps
which implement various [cache algorithms][] to be provided.
## API

@@ -298,7 +412,8 @@

| ---------- | ---- | ------- | ----------- |
| *batch* | Boolean | `true` | Set to `false` to disable batching, invoking `batchLoadFn` with a single load key.
| *maxBatchSize* | Number | `Infinity` | Limits the number of items that get passed in to the `batchLoadFn`.
| *cache* | Boolean | `true` | Set to `false` to disable memoization caching, creating a new Promise and new key in the `batchLoadFn` for every load of the same key.
| *batch* | Boolean | `true` | Set to `false` to disable batching, invoking `batchLoadFn` with a single load key. This is equivalent to setting `maxBatchSize` to `1`.
| *maxBatchSize* | Number | `Infinity` | Limits the number of items that get passed in to the `batchLoadFn`. May be set to `1` to disable batching.
| *batchScheduleFn* | Function | See [Batch scheduling](#batch-scheduling) | A function to schedule the later execution of a batch. The function is expected to call the provided callback in the immediate future.
| *cache* | Boolean | `true` | Set to `false` to disable memoization caching, creating a new Promise and new key in the `batchLoadFn` for every load of the same key. This is equivalent to setting `cacheMap` to `null`.
| *cacheKeyFn* | Function | `key => key` | Produces cache key for a given load key. Useful when objects are keys and two objects should be considered equivalent.
| *cacheMap* | Object | `new Map()` | Instance of [Map][] (or an object with a similar API) to be used as cache.
| *cacheMap* | Object | `new Map()` | Instance of [Map][] (or an object with a similar API) to be used as cache. May be set to `null` to disable caching.

@@ -316,14 +431,23 @@ ##### `load(key)`

```js
var [ a, b ] = await myLoader.loadMany([ 'a', 'b' ]);
const [ a, b ] = await myLoader.loadMany([ 'a', 'b' ])
```
This is equivalent to the more verbose:
This is similar to the more verbose:
```js
var [ a, b ] = await Promise.all([
const [ a, b ] = await Promise.all([
myLoader.load('a'),
myLoader.load('b')
]);
])
```
However it is different in the case where any load fails. Where
Promise.all() would reject, loadMany() always resolves; each result
is either a value or an Error instance.
```js
var [ a, b, c ] = await myLoader.loadMany([ 'a', 'b', 'badkey' ]);
// c instanceof Error
```
- *keys*: An array of key values to load.

@@ -350,2 +474,3 @@

To prime the cache with an error at a key, provide an Error instance.

@@ -386,3 +511,3 @@ ## Using with GraphQL

```js
var UserType = new GraphQLObjectType({
const UserType = new GraphQLObjectType({
name: 'User',

@@ -400,5 +525,8 @@ fields: () => ({

type: new GraphQLList(UserType),
resolve: (user, { first }) => queryLoader.load([
'SELECT toID FROM friends WHERE fromID=? LIMIT ?', user.id, first
]).then(rows => rows.map(row => userLoader.load(row.toID)))
resolve: async (user, { first }) => {
const rows = await queryLoader.load([
'SELECT toID FROM friends WHERE fromID=? LIMIT ?', user.id, first
])
return rows.map(row => userLoader.load(row.toID))
}
}

@@ -425,11 +553,11 @@ })

stories: new DataLoader(keys => genStories(authToken, keys)),
};
}
}
// When handling an incoming web request:
var loaders = createLoaders(request.query.authToken);
const loaders = createLoaders(request.query.authToken)
// Then, within application logic:
var user = await loaders.users.load(4);
var pic = await loaders.cdnUrls.load(user.rawPicUrl);
const user = await loaders.users.load(4)
const pic = await loaders.cdnUrls.load(user.rawPicUrl)
```

@@ -449,29 +577,52 @@

```js
let userByIDLoader = new DataLoader(ids => genUsersByID(ids).then(users => {
const userByIDLoader = new DataLoader(async ids => {
const users = await genUsersByID(ids)
for (let user of users) {
usernameLoader.prime(user.username, user);
usernameLoader.prime(user.username, user)
}
return users;
}));
return users
})
let usernameLoader = new DataLoader(names => genUsernames(names).then(users => {
const usernameLoader = new DataLoader(async names => {
const users = await genUsernames(names)
for (let user of users) {
userByIDLoader.prime(user.id, user);
userByIDLoader.prime(user.id, user)
}
return users;
}));
return users
})
```
### Freezing results to enforce immutability
## Custom Caches
Since DataLoader caches values, it's typically assumed these values will be
treated as if they were immutable. While DataLoader itself doesn't enforce
this, you can create a higher-order function to enforce immutability
with Object.freeze():
DataLoader can optionally be provided a custom Map instance to use as its
memoization cache. More specifically, any object that implements the methods `get()`,
`set()`, `delete()` and `clear()` can be provided. This allows for custom Maps
which implement various [cache algorithms][] to be provided. By default,
DataLoader uses the standard [Map][] which simply grows until the DataLoader
is released. The default is appropriate when requests to your application are
short-lived.
```js
function freezeResults(batchLoader) {
return keys => batchLoader(keys).then(values => values.map(Object.freeze))
}
const myLoader = new DataLoader(freezeResults(myBatchLoader))
```
### Batch functions which return Objects instead of Arrays
DataLoader expects batch functions which return an Array of the same length as
the provided keys. However this is not always a common return format from other
libraries. A DataLoader higher-order function can convert from one format to another. The example below converts a `{ key: value }` result to the format
DataLoader expects.
```js
function objResults(batchLoader) {
return keys => batchLoader(keys).then(objValues => keys.map(
key => objValues[key] || new Error(`No value for ${key}`)
))
}
const myLoader = new DataLoader(objResults(myBatchLoader))
```
## Common Back-ends

@@ -481,17 +632,31 @@

## Other implementations
## Other Implementations
Listed in alphabetical order
* Elixir
* [dataloader](https://github.com/absinthe-graphql/dataloader)
* Golang
* [Dataloader](https://github.com/nicksrandall/dataloader)
* Java
* [java-dataloader](https://github.com/graphql-java/java-dataloader)
* .Net
* [GraphQL .NET DataLoader](https://graphql-dotnet.github.io/docs/guides/dataloader/)
* [GreenDonut](https://github.com/ChilliCream/greendonut)
* Perl
* [perl-DataLoader](https://github.com/richardjharris/perl-DataLoader)
* PHP
* [DataLoaderPHP](https://github.com/overblog/dataloader-php)
* Python
* [aiodataloader](https://github.com/syrusakbary/aiodataloader)
* ReasonML
* [bs-dataloader](https://github.com/ulrikstrid/bs-dataloader)
* Ruby
* [BatchLoader](https://github.com/exaspark/batch-loader)
* [Dataloader](https://github.com/sheerun/dataloader)
* [BatchLoader](https://github.com/exaspark/batch-loader)
* ReasonML
* [bs-dataloader](https://github.com/ulrikstrid/bs-dataloader)
* Java
* [java-dataloader](https://github.com/graphql-java/java-dataloader)
* Elixir
* [dataloader](https://github.com/absinthe-graphql/dataloader)
* Golang
* [Dataloader](https://github.com/nicksrandall/dataloader)
* [GraphQL Batch](https://github.com/Shopify/graphql-batch)
* Rust
* [Dataloader](https://github.com/cksac/dataloader-rs)
* Swift
* [SwiftDataLoader](https://github.com/kimdv/SwiftDataLoader)

@@ -502,2 +667,6 @@ ## Video Source Code Walkthrough

A walkthrough of the DataLoader v1 source code. While the source has changed
since this video was made, it is still a good overview of the rationale of
DataLoader and how it works.
<a href="https://youtu.be/OQTnXNCDywA" target="_blank" alt="DataLoader Source Code Walkthrough"><img src="https://img.youtube.com/vi/OQTnXNCDywA/0.jpg" /></a>

@@ -512,1 +681,3 @@

[babel/polyfill]: https://babeljs.io/docs/usage/polyfill/
[lru_map]: https://github.com/rsms/js-lru
[source code]: https://github.com/graphql/dataloader/blob/master/src/index.js

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc