change-propagation
Comparing version 0.3.1 to 0.3.2
@@ -5,4 +5,6 @@ "use strict";
 const uuid = require('cassandra-uuid').TimeUuid;
 const HTTPError = require('hyperswitch').HTTPError;
+const TopicsNotExistError = require('wmf-kafka-node/lib/errors').TopicsNotExistError;
 const Task = require('./task');
+const utils = require('./utils');
@@ -67,15 +69,15 @@ /**
-    _exec(event, statName, statDelayStartTime) {
+    _exec(origEvent, statName, statDelayStartTime, retryEvent) {
         const rule = this.rule;
-        this.log(`trace/${rule.name}`, { msg: 'Event message received', event: event });
+        this.log(`trace/${rule.name}`, { msg: 'Event message received', event: origEvent });
         // latency from the original event creation time to execution time
         this.hyper.metrics.endTiming([statName + '_delay'],
-            statDelayStartTime || new Date(event.meta.dt));
+            statDelayStartTime || new Date(origEvent.meta.dt));
         const startTime = Date.now();
         const expander = {
-            message: event,
-            match: rule.expand(event)
+            message: origEvent,
+            match: rule.expand(origEvent)
         };
@@ -85,3 +87,4 @@ return P.each(rule.exec, (tpl) => {
             request.headers = Object.assign(request.headers, {
-                'x-request-id': event.meta.request_id
+                'x-request-id': origEvent.meta.request_id,
+                'x-triggered-by': utils.triggeredBy(retryEvent || origEvent)
             });
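The new `x-triggered-by` header records the chain of events that led to a request, so that retries and derived events can be traced back to their origin. `utils.triggeredBy` itself is not part of this diff; judging from the header values asserted in the tests below (for example `simple_test_rule:/sample/uri,change-prop.retry.simple_test_rule:/sample/uri`), a minimal sketch of the idea might look like this:

```js
// Hypothetical sketch -- the real implementation lives in lib/utils.js and is
// not shown in this diff. Each hop contributes a `topic:uri` entry; retry
// events already carry the accumulated chain in their triggered_by field.
function triggeredBy(event) {
    const entry = `${event.meta.topic}:${event.meta.uri}`;
    return event.triggered_by ? `${event.triggered_by},${entry}` : entry;
}
```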
@@ -97,5 +100,10 @@ return this.hyper.request(request);
         try {
-            return JSON.parse(message);
+            return P.resolve(JSON.parse(message));
         } catch (e) {
             this.log(`error/${this.rule.name}`, e);
+            return this.hyper.post({
+                uri: '/sys/queue/events',
+                body: [ this._constructErrorMessage(e, message) ]
+            })
+            .thenReturn(undefined);
         }
@@ -134,36 +142,44 @@ }
         const statName = this.hyper.metrics.normalizeName(this.rule.name + '_retry');
-        let message = this._safeParse(msg.value);
-        if (!message) {
-            // Don't retry if we can't parse an event, just log.
-            return;
-        }
-        if (message.emitter_id !== this._consumerId()) {
-            // Not our business, don't care
-            return;
-        }
-        if (this._isLimitExceeded(message)) {
-            // We've done our best, give up
-            return;
-        }
-        if (!this._test(message.original_event)) {
-            // doesn't match any more, possibly meaning
-            // the rule has been changed since we last
-            // executed it on the message
-            return;
-        }
-        return this.taskQueue.enqueue(new Task(consumer, message,
-            this._exec.bind(this, message.original_event,
-                statName, new Date(message.meta.dt)),
-            (e) => {
-                const retryMessage = this._constructRetryMessage(message.original_event,
-                    e, message.retries_left - 1);
-                if (this.rule.shouldRetry(e) && !this._isLimitExceeded(retryMessage)) {
-                    return this._retry(retryMessage);
-                }
-            }
-        ));
+        return this._safeParse(msg.value)
+        .then((message) => {
+            if (!message) {
+                // Don't retry if we can't parse an event, just log.
+                return;
+            }
+            if (message.emitter_id !== this._consumerId()) {
+                // Not our business, don't care
+                return;
+            }
+            if (this._isLimitExceeded(message)) {
+                // We've done our best, give up
+                return;
+            }
+            if (!this._test(message.original_event)) {
+                // doesn't match any more, possibly meaning
+                // the rule has been changed since we last
+                // executed it on the message
+                return;
+            }
+            return this.taskQueue.enqueue(new Task(consumer, message,
+                this._exec.bind(this, message.original_event,
+                    statName, new Date(message.meta.dt), message),
+                (e) => {
+                    const retryMessage = this._constructRetryMessage(message.original_event,
+                        e, message.retries_left - 1, message);
+                    if (this.rule.shouldRetry(e)) {
+                        if (this._isLimitExceeded(retryMessage)) {
+                            return this.hyper.post({
+                                uri: '/sys/queue/events',
+                                body: [this._constructErrorMessage(e, message)]
+                            });
+                        }
+                        return this._retry(retryMessage);
+                    }
+                }
+            ));
+        });
     });
@@ -188,3 +204,3 @@ });
-    _constructRetryMessage(event, errorRes, retriesLeft) {
+    _constructRetryMessage(event, errorRes, retriesLeft, retryEvent) {
         return {
@@ -200,2 +216,3 @@ meta: {
             },
+            triggered_by: utils.triggeredBy(retryEvent || event),
             emitter_id: this._consumerId(),
@@ -208,2 +225,37 @@ retries_left: retriesLeft === undefined ? this.rule.spec.retry_limit : retriesLeft,
+    /**
+     * Create an error message for a special Kafka topic
+     *
+     * @param {Error} e an exception that caused a failure
+     * @param {string|Object} event the original event, or the raw string if JSON parsing failed
+     */
+    _constructErrorMessage(e, event) {
+        const eventUri = typeof event === 'string' ? '/error/uri' : event.meta.uri;
+        const domain = typeof event === 'string' ? 'unknown' : event.meta.domain;
+        const now = new Date();
+        const errorEvent = {
+            meta: {
+                topic: 'change-prop.error',
+                schema_uri: 'error/1',
+                uri: eventUri,
+                request_id: typeof event === 'string' ? undefined : event.meta.request_id,
+                id: uuid.fromDate(now),
+                dt: now.toISOString(),
+                domain: domain
+            },
+            emitter_id: 'change-prop#' + this.rule.name,
+            raw_event: typeof event === 'string' ? event : JSON.stringify(event),
+            message: e.message,
+            stack: e.stack
+        };
+        if (e.constructor === HTTPError) {
+            errorEvent.details = {
+                status: e.status,
+                headers: e.headers,
+                body: e.body
+            };
+        }
+        return errorEvent;
+    }
+
     subscribe() {
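For reference, the event produced by `_constructErrorMessage` for an unparseable message would look roughly as follows; the rule name, uuid, timestamp, and parse-error text are illustrative, and `request_id` is omitted because the raw event is a string:

```json
{
    "meta": {
        "topic": "change-prop.error",
        "schema_uri": "error/1",
        "uri": "/error/uri",
        "id": "c524cccc-2d3c-11e6-b66a-d1ffb0d36a2f",
        "dt": "2016-06-08T09:33:40.593Z",
        "domain": "unknown"
    },
    "emitter_id": "change-prop#simple_test_rule",
    "raw_event": "not_a_json_message",
    "message": "Unexpected token o",
    "stack": "SyntaxError: Unexpected token o ..."
}
```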
@@ -220,25 +272,27 @@ const rule = this.rule;
         const statName = this.hyper.metrics.normalizeName(this.rule.name);
-        let message = this._safeParse(msg.value);
-        if (!message || !this._test(message)) {
-            // no message or no match, we are done here
-            return;
-        }
-        return this.taskQueue.enqueue(new Task(
-            consumer,
-            msg,
-            this._exec.bind(this, message, statName),
-            (e) => {
-                if (this.rule.shouldRetry(e)) {
-                    return this._retry(this._constructRetryMessage(message, e));
-                }
-            }
-        ));
+        return this._safeParse(msg.value)
+        .then((message) => {
+            if (!message || !this._test(message)) {
+                // no message or no match, we are done here
+                return;
+            }
+            return this.taskQueue.enqueue(new Task(
+                consumer,
+                msg,
+                this._exec.bind(this, message, statName),
+                (e) => {
+                    if (this.rule.shouldRetry(e)) {
+                        return this._retry(this._constructRetryMessage(message, e));
+                    }
+                }
+            ));
+        });
     });
 })
+.catch(TopicsNotExistError, (e) => {
+    this.log('error/topic', e);
+    // Exit async to let the logs get processed.
+    setTimeout(() => process.exit(1), 100);
+});
@@ -51,3 +51,2 @@ 'use strict';
     return '{' + Object.keys(obj).map((key) => {
-        const field = obj[key];
         return key + ': ' + _getMatchObjCode(obj[key]);
@@ -65,2 +64,29 @@ }).join(', ') + '}';
+function _compileNamedRegex(obj, result, name, fieldName) {
+    const captureNames = [];
+    const normalRegex = obj.replace(/\(\?<(\w+)>/g, (_, name) => {
+        captureNames.push(name);
+        return '(';
+    });
+    const numGroups = (new RegExp(normalRegex.toString() + '|')).exec('').length - 1;
+    if (captureNames.length && captureNames.length !== numGroups) {
+        throw new Error('Invalid match regex. ' +
+            'Mixing named and unnamed capture groups are not supported. Regex: ' + obj);
+    }
+    if (!captureNames.length) {
+        // No named captures
+        result[fieldName] = `${normalRegex}.exec(${name})`;
+    } else {
+        let code = `(() => { const execRes = ${normalRegex}.exec(${name}); const res = {}; `;
+        captureNames.forEach((captureName, index) => {
+            code += `res['${captureName}'] = execRes[${index + 1}]; `;
+        });
+        result[fieldName] = code + 'return res; })()';
+    }
+    return `${normalRegex}.test(${name})`;
+}
+
 function _compileMatch(obj, result, name, fieldName) {
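Two details of `_compileNamedRegex` are worth spelling out. First, named capture groups were not natively supported by JavaScript engines of this era, so the function rewrites `(?<name>` into a plain `(` and generates accessor code that maps positional captures back to their names. Second, the capture-group count is obtained by appending `|` to the pattern: the result matches the empty string, and `exec('')` then returns one (undefined) array slot per group. A small illustration, assuming the match value is a regex-literal string as in the tests:

```js
// Group-counting trick: '|' adds an empty alternative, so exec('') always
// succeeds and returns [fullMatch, undefined, ...] -- one slot per group.
const normalRegex = '/\\/fake\\/([^\\/]+)/';  // after rewriting (?<element> to (
const numGroups = new RegExp(normalRegex + '|').exec('').length - 1; // 1

// For match: { meta: { uri: "/\\/fake\\/(?<element>[^\\/]+)/" } } the compiler
// emits (roughly) this expression, which evaluates to { element: ... }:
// (() => { const execRes = /\/fake\/([^\/]+)/.exec(message.meta.uri);
//          const res = {}; res['element'] = execRes[1]; return res; })()
```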
@@ -85,4 +111,3 @@ if (obj.constructor !== Object) {
         // it's a regex, we have to test the arg
-        result[fieldName] = `${obj}.exec(${name})`;
-        return `${obj}.test(${name})`;
+        return _compileNamedRegex(obj, result, name, fieldName);
     }
 {
   "name": "change-propagation",
-  "version": "0.3.1",
+  "version": "0.3.2",
   "description": "Listens to events from Kafka and delivers them",
@@ -37,7 +37,7 @@ "main": "server.js",
   "dependencies": {
-    "bluebird": "^3.3.5",
+    "bluebird": "^3.4.0",
     "cassandra-uuid": "^0.0.2",
     "extend": "^3.0.0",
-    "hyperswitch": "^0.4.9",
-    "service-runner": "^1.2.2",
+    "hyperswitch": "^0.5.0",
+    "service-runner": "^1.3.0",
     "wmf-kafka-node": "^0.1.3",
@@ -11,3 +11,61 @@ # Change Propagation

The purpose of the change propagation service is to execute actions based on events. The service
listens to Kafka topics and executes handlers for events according to configurable rules. Currently,
a rule can issue HTTP requests, produce new messages, or make an HTCP purge request. The list of
supported actions is easily extended by creating new modules with internal HTTP endpoints and
calling them from the rules.

## Features

- Config-based rules for message processing. For more information about rule configuration,
  see the [Rule Configuration](#rule-configuration) section.
- Automatic limited retries
- Global rule execution concurrency limiting
- Metrics and logging support

## Rule Configuration

A `Rule` is a semantically meaningful piece of service functionality. For example,
'Rerender RESTBase if the page was changed' and 'Update the summary if the RESTBase render
was changed' are both rules. To specify rules, add a property to the `kafka` module config
[template property](https://github.com/wikimedia/change-propagation/blob/master/config.example.yaml#L48).
Each rule is executed by a single worker, but an internal load-balancing mechanism tries to
distribute rules evenly across workers.

A rule can contain the following properties:

- **topic** The name of the topic to subscribe to.
- **match** An optional predicate for a message. The rule is executed only if all of the `match`
  properties are satisfied by the message. Properties can be nested objects, constants,
  or regexes. A regex can contain capture groups, and the captured values are later accessible
  in the `exec` part of the rule. Capture groups can be named, using the `(?<name>group)` syntax;
  the captured value is then accessible as `match.property_name.capture_name` within the `exec`
  part. Named and unnamed captures cannot be mixed. A worked example follows the sample rule below.
- **match_not** An optional predicate that must *not* match for the rule to be executed. It does
  not capture values or make them accessible to the `exec` part of the rule.
- **exec** An array of HTTP request templates that are executed sequentially if the rule matched.
  The templates follow the [request templating syntax](https://github.com/wikimedia/swagger-router#request-templating)
  and are evaluated with a context that has a global `message` property holding the original
  message, and a `match` property holding the values extracted by the match.

Here's an example of a rule that matches all `resource_change` messages emitted by RESTBase
and purges the Varnish caches for the affected resources, by issuing an HTTP request to a special
internal module that converts it into an HTCP purge request:

```yaml
purge_varnish:
  topic: resource_change
  match:
    meta:
      uri: '/^https?:\/\/[^\/]+\/api\/rest_v1\/(?<rest>.+)$/'
    tags:
      - restbase
  exec:
    method: post
    uri: '/sys/purge/'
    body:
      - meta:
          uri: '//{{message.meta.domain}}/api/rest_v1/{{match.meta.uri.rest}}'
```
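To make the template expansion concrete: if a `resource_change` event arrives with `meta.domain` set to `en.wikipedia.org` and `meta.uri` set to `https://en.wikipedia.org/api/rest_v1/page/html/Foo` (hypothetical values), the named group captures `rest: page/html/Foo`, and the rule posts a purge for `//en.wikipedia.org/api/rest_v1/page/html/Foo` to `/sys/purge/`.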
## Testing

@@ -38,1 +96,26 @@
## Running locally

To run the service locally, you need to have Kafka and Zookeeper installed and running.
An example installation and configuration can be found in the [Testing](#testing) section
of this README. After Kafka is installed, configured, and started with the `npm run start-kafka`
command, copy the example config and run the service:

```bash
cp config.example.yaml config.yaml
npm start
```

Before using the service you also need to ensure that all topics used in your config exist
in Kafka. Topics should be prefixed with a datacenter name (the default is `default`), and
each topic must have a companion retry topic. So, if you are using a topic named `test_topic`,
the following topics must exist in Kafka:

```
- 'default.test_topic'
- 'default.change-prop.retry.test_topic'
```
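How you create these topics depends on your Kafka setup; with a stock Kafka distribution and a local Zookeeper, a sketch using the standard CLI tool might look like this (the replication and partition counts are assumptions for a development setup):

```bash
# Assumes kafka-topics.sh from the Kafka distribution is on the PATH and
# Zookeeper is running on the default local port.
for topic in 'default.test_topic' 'default.change-prop.retry.test_topic'; do
    kafka-topics.sh --create --zookeeper localhost:2181 \
        --replication-factor 1 --partitions 1 --topic "$topic"
done
```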
## Bug Reporting

The service is maintained by the [Wikimedia Services Team](https://www.mediawiki.org/wiki/Wikimedia_Services).
For bug reports, use the [EventBus project on Phabricator](https://phabricator.wikimedia.org/tag/eventbus/)
or the [#wikimedia-services](https://kiwiirc.com/client/irc.freenode.net:+6697/#teleirc) IRC channel on Freenode.
@@ -6,2 +6,3 @@ "use strict";
 const Template = HyperSwitch.Template;
+const utils = require('../lib/utils');
@@ -66,2 +67,3 @@ const CONTINUE_TOPIC_NAME = 'change-prop.backlinks.continue';
             },
+            triggered_by: utils.triggeredBy(originalEvent),
             original_event: originalEvent,
@@ -90,2 +92,3 @@ continue: res.body.continue.blcontinue
             },
+            triggered_by: utils.triggeredBy(originalEvent),
             tags: [ 'change-prop', 'backlinks' ]
@@ -165,2 +165,26 @@ 'use strict';
+        it('expansion with named groups', function() {
+            var r = new Rule('rule', {
+                topic: 'nono',
+                exec: { uri: 'a/{match.meta.uri.element}/c' },
+                match: { meta: { uri: "/\\/fake\\/(?<element>[^\\/]+)/" }, number: 1 }
+            });
+            var exp = r.expand(msg);
+            assert.deepEqual(exp.meta.uri, { element: 'uri' });
+        });
+
+        it('checks for named and unnamed groups mixing', function() {
+            try {
+                var r = new Rule('rule', {
+                    topic: 'nono',
+                    exec: { uri: 'a/{match.meta.uri.element}/c' },
+                    match: { meta: { uri: "/\\/(\w+)\\/(?<element>[^\\/]+)/" }, number: 1 }
+                });
+                throw new Error('Error must be thrown');
+            } catch (e) {
+                assert.deepEqual(e.message,
+                    'Invalid match regex. Mixing named and unnamed capture groups are not supported. Regex: /\\/(w+)\\/(?<element>[^\\/]+)/');
+            }
+        });
     });
@@ -24,2 +24,3 @@ "use strict";
 let retrySchema;
+let errorSchema;
@@ -41,5 +42,9 @@ before(function() {
         .then(() => preq.get({
-            uri: 'https://raw.githubusercontent.com/wikimedia/mediawiki-event-schemas/master/jsonschema/retry/1.yaml'
+            uri: 'https://raw.githubusercontent.com/wikimedia/mediawiki-event-schemas/master/jsonschema/change-prop/retry/1.yaml'
        }))
-        .then((res) => retrySchema = yaml.safeLoad(res.body));
+        .then((res) => retrySchema = yaml.safeLoad(res.body))
+        .then(() => preq.get({
+            uri: 'https://raw.githubusercontent.com/wikimedia/mediawiki-event-schemas/master/jsonschema/error/1.yaml'
+        }))
+        .then((res) => errorSchema = yaml.safeLoad(res.body));
     });
@@ -64,3 +69,4 @@
             'content-type': 'application/json',
-            'x-request-id': common.SAMPLE_REQUEST_ID
+            'x-request-id': common.SAMPLE_REQUEST_ID,
+            'x-triggered-by': 'simple_test_rule:/sample/uri'
         }
@@ -97,7 +103,11 @@ })
             'derived_field': 'test'
-        }).reply(500, {})
+        })
+        .matchHeader('x-triggered-by', 'simple_test_rule:/sample/uri')
+        .reply(500, {})
         .post('/', {
             'test_field_name': 'test_field_value',
             'derived_field': 'test'
-        }).reply(200, {});
+        })
+        .matchHeader('x-triggered-by', 'simple_test_rule:/sample/uri,change-prop.retry.simple_test_rule:/sample/uri')
+        .reply(200, {});
@@ -124,7 +134,9 @@ return producer.sendAsync([{
             'derived_field': 'test'
-        }).reply(500, {})
+        })
+        .reply(500, {})
         .post('/', {
             'test_field_name': 'test_field_value',
             'derived_field': 'test'
-        }).reply(200, {});
+        })
+        .reply(200, {});
@@ -139,3 +151,4 @@ return kafkaFactory.newConsumer(kafkaFactory.newClient(),
                 const validate = ajv.compile(retrySchema);
-                var valid = validate(JSON.parse(message.value));
+                const msg = JSON.parse(message.value);
+                const valid = validate(msg);
                 if (!valid) {
@@ -145,2 +158,4 @@ done(new assert.AssertionError({
                     }));
+                } else if (msg.triggered_by !== 'simple_test_rule:/sample/uri') {
+                    done(new Error('TriggeredBy should be equal to simple_test_rule:/sample/uri'));
                 } else {
@@ -165,3 +180,3 @@ done();
             'content-type': 'application/json',
-            'x-request-id': common.SAMPLE_REQUEST_ID
+            'x-request-id': common.SAMPLE_REQUEST_ID,
         }
@@ -172,3 +187,17 @@ })
             'derived_field': 'test'
-        }).times(3).reply(500, {});
+        })
+        .matchHeader('x-triggered-by', 'simple_test_rule:/sample/uri')
+        .reply(500, {})
+        .post('/', {
+            'test_field_name': 'test_field_value',
+            'derived_field': 'test'
+        })
+        .matchHeader('x-triggered-by', 'simple_test_rule:/sample/uri,change-prop.retry.simple_test_rule:/sample/uri')
+        .reply(500, {})
+        .post('/', {
+            'test_field_name': 'test_field_value',
+            'derived_field': 'test'
+        })
+        .matchHeader('x-triggered-by', 'simple_test_rule:/sample/uri,change-prop.retry.simple_test_rule:/sample/uri,change-prop.retry.simple_test_rule:/sample/uri')
+        .reply(500, {});
@@ -189,3 +218,4 @@ return producer.sendAsync([{
             'content-type': 'application/json',
-            'x-request-id': common.SAMPLE_REQUEST_ID
+            'x-request-id': common.SAMPLE_REQUEST_ID,
+            'x-triggered-by': 'simple_test_rule:/sample/uri'
         }
@@ -196,3 +226,5 @@ })
             'derived_field': 'test'
-        }).reply(404, {});
+        })
+        .matchHeader('x-triggered-by', 'simple_test_rule:/sample/uri')
+        .reply(404, {});
@@ -213,3 +245,4 @@ return producer.sendAsync([{
             'content-type': 'application/json',
-            'x-request-id': common.SAMPLE_REQUEST_ID
+            'x-request-id': common.SAMPLE_REQUEST_ID,
+            'x-triggered-by': 'simple_test_rule:/sample/uri'
         }
@@ -220,3 +253,5 @@ })
             'derived_field': 'test'
-        }).reply(200, {});
+        })
+        .matchHeader('x-triggered-by', 'simple_test_rule:/sample/uri')
+        .reply(200, {});
@@ -243,3 +278,5 @@ return producer.sendAsync([{
             'derived_field': 'test'
-        }).times(2).reply({});
+        })
+        .matchHeader('x-triggered-by', 'test_dc.kafka_producing_rule:/sample/uri,simple_test_rule:/sample/uri')
+        .times(2).reply({});
@@ -276,3 +313,6 @@ return producer.sendAsync([{
         })
-        .get('/api/rest_v1/page/html/Some_Page').times(2).reply(200)
+        .get('/api/rest_v1/page/html/Some_Page')
+        .matchHeader('x-triggered-by', 'mediawiki.revision_create:/sample/uri,resource_change:https://en.wikipedia.org/wiki/Some_Page')
+        .times(2)
+        .reply(200)
         .post('/w/api.php', {
@@ -293,3 +333,6 @@ format: 'json',
         })
-        .get('/api/rest_v1/page/html/Some_Page').reply(200);
+        .get('/api/rest_v1/page/html/Some_Page')
+        .matchHeader('x-triggered-by', 'mediawiki.revision_create:/sample/uri,resource_change:https://en.wikipedia.org/wiki/Some_Page')
+        .reply(200);
         return producer.sendAsync([{
@@ -306,3 +349,34 @@ topic: 'test_dc.mediawiki.revision_create',
+    it('Should emit valid messages to error topic', (done) => {
+        // No need to emit new messages, we will use one from the previous test
+        kafkaFactory.newConsumer(kafkaFactory.newClient(),
+            'change-prop.error',
+            'change-prop-test-error-consumer')
+        .then((errorConsumer) => {
+            errorConsumer.once('message', (message) => {
+                try {
+                    const ajv = new Ajv();
+                    const validate = ajv.compile(errorSchema);
+                    var valid = validate(JSON.parse(message.value));
+                    if (!valid) {
+                        done(new assert.AssertionError({
+                            message: ajv.errorsText(validate.errors)
+                        }));
+                    } else {
+                        done();
+                    }
+                } catch (e) {
+                    done(e);
+                }
+            });
+        })
+        .then(() => {
+            return producer.sendAsync([{
+                topic: 'test_dc.mediawiki.revision_create',
+                messages: [ 'not_a_json_message' ]
+            }]);
+        });
+    });
+
     after(() => changeProp.stop());
 });
@@ -42,3 +42,4 @@ "use strict";
             reqheaders: {
-                'cache-control': 'no-cache'
+                'cache-control': 'no-cache',
+                'x-triggered-by': 'resource_change:https://en.wikipedia.org/api/rest_v1/page/html/Main%20Page'
             }
@@ -75,3 +76,4 @@ })
             reqheaders: {
-                'cache-control': 'no-cache'
+                'cache-control': 'no-cache',
+                'x-triggered-by': 'resource_change:https://en.wiktionary.org/api/rest_v1/page/html/Main%20Page'
             }
@@ -108,3 +110,4 @@ })
             reqheaders: {
-                'cache-control': 'no-cache'
+                'cache-control': 'no-cache',
+                'x-triggered-by': 'resource_change:https://en.wikipedia.org/api/rest_v1/page/html/Main%20Page'
             }
@@ -20,3 +20,4 @@ "use strict";
     'test_dc.resource_change',
-    'test_dc.change-prop.retry.resource_change'
+    'test_dc.change-prop.retry.resource_change',
+    'test_dc.change-prop.error'
 ];
+ Added hyperswitch@0.5.3 (transitive)
- Removed hyperswitch@0.4.10 (transitive)
Updated bluebird@^3.4.0
Updated hyperswitch@^0.5.0
Updated service-runner@^1.3.0