fastify-rate-limit: npm package version comparison

Comparing version 4.0.3 to 4.1.0
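The bulk of this release renames the `whitelist` option to `allowList`. As the index.js and index.d.ts hunks below show, the old name is still read as a deprecated fallback, so existing configurations keep working. A minimal registration sketch under that assumption; the `max` and `timeWindow` values here are illustrative, not taken from the diff:

```js
const fastify = require('fastify')()

fastify.register(require('fastify-rate-limit'), {
  max: 100,                 // illustrative value
  timeWindow: '1 minute',   // illustrative value
  allowList: ['127.0.0.1']  // was `whitelist` in 4.0.3; the old key is still accepted but deprecated
})
```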

.github/dependabot.yml


example/example-custom.js

@@ -20,3 +20,3 @@ 'use strict'

-var knex = Knex({
+const knex = Knex({
client: 'sqlite3',

@@ -23,0 +23,0 @@ connection: {

@@ -20,3 +20,3 @@ 'use strict'

// cache: 10000,
-whitelist: ['127.0.0.2'], // global whitelist access ( ACL based on the key from the keyGenerator)
+allowList: ['127.0.0.2'], // global allowList access ( ACL based on the key from the keyGenerator)
redis: redis, // connection to redis

@@ -42,3 +42,3 @@ skipOnError: false // default false

max: 3,
-whitelist: ['127.0.2.1', '127.0.3.1'],
+allowList: ['127.0.2.1', '127.0.3.1'],
timeWindow: '1 minute'

@@ -59,3 +59,3 @@ }

timeWindow: '1 minute',
-whitelist: ['127.0.2.1'],
+allowList: ['127.0.2.1'],
onExceeding: function (req) {

@@ -62,0 +62,0 @@ console.log('callback on exceededing ... executed before response to client. req is give as argument')

@@ -10,3 +10,3 @@ /// <reference types="node" />

RequestGeneric extends RequestGenericInterface = RequestGenericInterface
> {
> {
ip: string | number

@@ -16,3 +16,3 @@ }

-export interface FastifyRateLimitOptions {}
+export interface FastifyRateLimitOptions { }

@@ -25,7 +25,7 @@ export interface errorResponseBuilderContext {

export interface FastifyRateLimitStoreCtor {
-new (options: FastifyRateLimitOptions): FastifyRateLimitStore;
+new(options: FastifyRateLimitOptions): FastifyRateLimitStore;
}
export interface FastifyRateLimitStore {
-incr(key: string, callback: ( error: Error|null, result?: { current: number, ttl: number } ) => void): void;
+incr(key: string, callback: (error: Error | null, result?: { current: number, ttl: number }) => void): void;
child(routeOptions: RouteOptions & { path: string, prefix: string }): FastifyRateLimitStore;

@@ -44,6 +44,10 @@ }

max?: number | ((req: FastifyRequest, key: string) => number);
-timeWindow?: number;
+timeWindow?: number | string;
cache?: number;
store?: FastifyRateLimitStoreCtor;
+/**
+ * @deprecated Use `allowList` property
+ */
whitelist?: string[] | ((req: FastifyRequest, key: string) => boolean);
+allowList?: string[] | ((req: FastifyRequest, key: string) => boolean);
redis?: any;

@@ -50,0 +54,0 @@ skipOnError?: boolean;
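The store-related declarations in the hunks above define the contract a custom store must satisfy: a constructor that receives the plugin options, an `incr(key, callback)` method that reports `{ current, ttl }`, and a `child(routeOptions)` factory for per-route stores. A minimal in-memory sketch against that contract; the name `SimpleStore`, the assumption of a numeric `timeWindow`, and the fixed-window bookkeeping are all illustrative, not part of the package:

```js
'use strict'

function SimpleStore (options) {
  this.options = options    // plugin/route options, e.g. { timeWindow: 60000, ... }
  this.counters = new Map() // key -> { current, start }
}

SimpleStore.prototype.incr = function (key, callback) {
  const now = Date.now()
  const window = this.options.timeWindow // assumes timeWindow is a number of milliseconds
  let entry = this.counters.get(key)
  if (!entry || now - entry.start >= window) {
    entry = { current: 0, start: now }    // start a fresh window for this key
  }
  entry.current++
  this.counters.set(key, entry)
  // Report the hit count and the time left in the current window.
  callback(null, { current: entry.current, ttl: window - (now - entry.start) })
}

SimpleStore.prototype.child = function (routeOptions) {
  // Per-route stores receive the route options (including path and prefix) merged in.
  return new SimpleStore(Object.assign({}, this.options, routeOptions))
}

// Wired up through the `store` option declared above (a FastifyRateLimitStoreCtor):
// fastify.register(require('fastify-rate-limit'), { max: 10, timeWindow: 60000, store: SimpleStore })
```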

@@ -45,3 +45,3 @@ 'use strict'

-globalParams.whitelist = settings.whitelist || null
+globalParams.allowList = settings.allowList || settings.whitelist || null
globalParams.ban = settings.ban || null

@@ -51,3 +51,3 @@

const pluginComponent = {
-whitelist: globalParams.whitelist
+allowList: globalParams.allowList
}

@@ -129,10 +129,10 @@

-// whitelist doesn't apply any rate limit
-if (pluginComponent.whitelist) {
-if (typeof pluginComponent.whitelist === 'function') {
-if (pluginComponent.whitelist(req, key)) {
+// allowList doesn't apply any rate limit
+if (pluginComponent.allowList) {
+if (typeof pluginComponent.allowList === 'function') {
+if (pluginComponent.allowList(req, key)) {
next()
return
}
-} else if (pluginComponent.whitelist.indexOf(key) > -1) {
+} else if (pluginComponent.allowList.indexOf(key) > -1) {
next()

@@ -143,3 +143,3 @@ return

-// As the key is not whitelist in redis/lru, then we increment the rate-limit of the current request and we call the function "onIncr"
+// As the key is not allowList in redis/lru, then we increment the rate-limit of the current request and we call the function "onIncr"
pluginComponent.store.incr(key, onIncr)

@@ -146,0 +146,0 @@

{
"name": "fastify-rate-limit",
-"version": "4.0.3",
+"version": "4.1.0",
"description": "A low overhead rate limiter for your routes",

@@ -8,2 +8,4 @@ "main": "index.js",

"redis": "docker run -p 6379:6379 --rm redis:3.0.7",
+"lint": "standard",
+"lint:fix": "standard --fix",
"test": "standard && tap test/*.test.js && npm run typescript",

@@ -34,9 +36,9 @@ "typescript": "tsd"

"sqlite3": "^5.0.0",
-"standard": "^14.0.2",
+"standard": "^16.0.2",
"tap": "^14.10.7",
-"tsd": "^0.13.1"
+"tsd": "^0.14.0"
},
"dependencies": {
"fast-json-stringify": "^2.2.1",
-"fastify-plugin": "^2.0.0",
+"fastify-plugin": "^3.0.0",
"ms": "^2.1.1",

@@ -43,0 +45,0 @@ "tiny-lru": "^7.0.0"

@@ -65,3 +65,3 @@ # fastify-rate-limit

cache: 10000, // default 5000
-whitelist: ['127.0.0.1'], // default []
+allowList: ['127.0.0.1'], // default []
redis: new Redis({ host: '127.0.0.1' }), // default null

@@ -85,3 +85,3 @@ skipOnError: true, // default false

- `cache`: this plugin internally uses a lru cache to handle the clients, you can change the size of the cache with this option
-- `whitelist`: array of string of ips to exclude from rate limiting. It can be a sync function with the signature `(req, key) => {}` where `req` is the Fastify request object and `key` is the value generated by the `keyGenerator`. If the function return a truthy value, the request will be excluded from the rate limit.
+- `allowList`: array of string of ips to exclude from rate limiting. It can be a sync function with the signature `(req, key) => {}` where `req` is the Fastify request object and `key` is the value generated by the `keyGenerator`. If the function return a truthy value, the request will be excluded from the rate limit.
- `redis`: by default this plugins uses an in-memory store, which is fast but if you application works on more than one server it is useless, since the data is store locally.<br>

@@ -105,2 +105,3 @@ You can pass a Redis client here and magically the issue is solved. To achieve the maximum speed, this plugins requires the use of [`ioredis`](https://github.com/luin/ioredis). **Note:**: the [default parameters](https://github.com/luin/ioredis/blob/v4.16.0/API.md#new_Redis_new) of a redis connection are not the fastest to provide a rate-limit. We suggest to customize the `connectTimeout` and `maxRetriesPerRequest` as in the [`example`](https://github.com/fastify/fastify-rate-limit/tree/master/example/example.js).

|| req.raw.ip // fallback to default
}
})
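The note attached to the hunk above recommends tuning `connectTimeout` and `maxRetriesPerRequest` on the ioredis connection so a slow or unreachable Redis does not stall rate-limited requests. A sketch of such a configuration; the host, port, and timeout values are illustrative, not copied from the repository's example:

```js
const fastify = require('fastify')()
const Redis = require('ioredis')

const redis = new Redis({
  host: '127.0.0.1',
  port: 6379,
  connectTimeout: 500,      // fail fast instead of waiting on a slow connection
  maxRetriesPerRequest: 1   // don't queue retries behind an unavailable Redis
})

fastify.register(require('fastify-rate-limit'), {
  max: 100,                 // illustrative limit
  timeWindow: '1 minute',
  redis: redis
})
```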

@@ -135,7 +136,7 @@ ```

-Dynamic `whitelist` example usage:
+Dynamic `allowList` example usage:
```js
fastify.register(require('fastify-rate-limit'), {
/* ... */
-whitelist: function(req, key) {
+allowList: function(req, key) {
return req.headers['x-app-client-id'] === 'internal-usage'

@@ -187,13 +188,13 @@ }

-For example the `whitelist` if configured:
+For example the `allowList` if configured:
- on the plugin registration will affect all endpoints within the encapsulation scope
- on the route declaration will affect only the targeted endpoint
-The global whitelist is configured when registering it with `fastify.register(...)`.
+The global allowlist is configured when registering it with `fastify.register(...)`.
-The endpoint whitelist is set on the endpoint directly with the `{ config : { rateLimit : { whitelist : [] } } }` object.
+The endpoint allowlist is set on the endpoint directly with the `{ config : { rateLimit : { allowList : [] } } }` object.
ACL checking is performed based on the value of the key from the `keyGenerator`.
-In this example we are checking the IP address, but it could be a whitelist of specific user identifiers (like JWT or tokens):
+In this example we are checking the IP address, but it could be an allowlist of specific user identifiers (like JWT or tokens):

@@ -207,3 +208,3 @@ ```js

max: 3000, // default global max rate limit
-whitelist: ['192.168.0.10'], // global whitelist access.
+allowList: ['192.168.0.10'], // global allowlist access.
redis: redis, // custom connection to redis

@@ -246,3 +247,3 @@ })

timeWindow: '1 minute',
-whitelist: ['127.0.0.1'],
+allowList: ['127.0.0.1'],
onExceeding: function (req) {

@@ -249,0 +250,0 @@ console.log('callback on exceededing ... executed before response to client')
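The endpoint-level allowlist mentioned a few hunks up is attached through the route's `config.rateLimit` object rather than at registration time. A small sketch of that shape; the route path, handler, and limits are made up for illustration:

```js
// Assumes fastify-rate-limit has already been registered on this fastify instance.
fastify.get('/status', {
  config: {
    rateLimit: {
      max: 10,
      timeWindow: '1 minute',
      allowList: ['127.0.0.1'] // applies only to this endpoint
    }
  }
}, (req, reply) => {
  reply.send({ ok: true })
})
```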

@@ -23,3 +23,3 @@ 'use strict'

LocalStore.prototype.incr = function (ip, cb) {
-var current = this.lru.get(ip) || 0
+let current = this.lru.get(ip) || 0
this.lru.set(ip, ++current)

@@ -26,0 +26,0 @@

@@ -12,3 +12,3 @@ 'use strict'

RedisStore.prototype.incr = function (ip, cb) {
-var key = this.key + ip
+const key = this.key + ip
this.redis.pipeline()

@@ -15,0 +15,0 @@ .incr(key)

@@ -110,2 +110,31 @@ 'use strict'

+test('With ips allowList', t => {
+t.plan(6)
+const fastify = Fastify()
+fastify.register(rateLimit, {
+max: 2,
+timeWindow: '2s',
+allowList: ['127.0.0.1']
+})
+fastify.get('/', (req, reply) => {
+reply.send('hello!')
+})
+fastify.inject('/', (err, res) => {
+t.error(err)
+t.strictEqual(res.statusCode, 200)
+fastify.inject('/', (err, res) => {
+t.error(err)
+t.strictEqual(res.statusCode, 200)
+fastify.inject('/', (err, res) => {
+t.error(err)
+t.strictEqual(res.statusCode, 200)
+})
+})
+})
+})
test('With ips whitelist', t => {

@@ -140,3 +169,3 @@ t.plan(6)

-test('With function whitelist', t => {
+test('With function allowList', t => {
t.plan(24)

@@ -148,3 +177,3 @@ const fastify = Fastify()

keyGenerator () { return 42 },
-whitelist: function (req, key) {
+allowList: function (req, key) {
t.ok(req.headers)

@@ -160,3 +189,3 @@ t.equals(key, 42)

-const whitelistHeader = {
+const allowListHeader = {
method: 'GET',

@@ -169,11 +198,11 @@ url: '/',

-fastify.inject(whitelistHeader, (err, res) => {
+fastify.inject(allowListHeader, (err, res) => {
t.error(err)
t.strictEqual(res.statusCode, 200)
-fastify.inject(whitelistHeader, (err, res) => {
+fastify.inject(allowListHeader, (err, res) => {
t.error(err)
t.strictEqual(res.statusCode, 200)
-fastify.inject(whitelistHeader, (err, res) => {
+fastify.inject(allowListHeader, (err, res) => {
t.error(err)

@@ -180,0 +209,0 @@ t.strictEqual(res.statusCode, 200)

@@ -128,3 +128,3 @@ 'use strict'

-test('With ips whitelist', t => {
+test('With ips allowList', t => {
t.plan(6)

@@ -134,3 +134,3 @@ const fastify = Fastify()

global: false,
-whitelist: ['127.0.0.1']
+allowList: ['127.0.0.1']
})

@@ -160,3 +160,3 @@

-test('With function whitelist', t => {
+test('With function allowList', t => {
t.plan(24)

@@ -167,3 +167,3 @@ const fastify = Fastify()

keyGenerator () { return 42 },
-whitelist: function (req, key) {
+allowList: function (req, key) {
t.ok(req.headers)

@@ -181,3 +181,3 @@ t.equals(key, 42)

-const whitelistHeader = {
+const allowListHeader = {
method: 'GET',

@@ -190,11 +190,11 @@ url: '/',

-fastify.inject(whitelistHeader, (err, res) => {
+fastify.inject(allowListHeader, (err, res) => {
t.error(err)
t.strictEqual(res.statusCode, 200)
-fastify.inject(whitelistHeader, (err, res) => {
+fastify.inject(allowListHeader, (err, res) => {
t.error(err)
t.strictEqual(res.statusCode, 200)
-fastify.inject(whitelistHeader, (err, res) => {
+fastify.inject(allowListHeader, (err, res) => {
t.error(err)

@@ -425,3 +425,3 @@ t.strictEqual(res.statusCode, 200)

test('works with existing route config', t => {
-t.plan(3)
+t.plan(4)
const fastify = Fastify()

@@ -439,3 +439,3 @@ fastify.register(rateLimit, { max: 2, timeWindow: 1000 })

fastify.inject('/', (err, res) => {
-if (err) {}
+t.error(err)
t.strictEqual(res.headers['x-ratelimit-limit'], 2)

@@ -442,0 +442,0 @@ t.strictEqual(res.headers['x-ratelimit-remaining'], 1)

@@ -21,3 +21,3 @@ import * as http from 'http'

cache: 10000,
-whitelist: ['127.0.0.1'],
+allowList: ['127.0.0.1'],
redis: new ioredis({ host: '127.0.0.1' }),

@@ -39,3 +39,3 @@ skipOnError: true,

max: (req: FastifyRequest<RequestGenericInterface>, key: string) => (42),
-whitelist: (req: FastifyRequest<RequestGenericInterface>, key: string) => (false),
+allowList: (req: FastifyRequest<RequestGenericInterface>, key: string) => (false),
timeWindow: 5000

@@ -42,0 +42,0 @@ }
